var/home/core/zuul-output/logs/kubelet.log
Dec 11 14:14:27 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 11 14:14:27 crc restorecon[4689]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized
by admin to system_u:object_r:container_file_t:s0:c225,c458 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c138,c778 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 
Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c108,c511 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 14:14:27 crc 
restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 11 14:14:27 crc 
restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc 
restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc 
restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 14:14:27 
crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 
14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:27 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 
14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 11 14:14:28 crc 
restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 
14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 
14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc 
restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 14:14:28 crc restorecon[4689]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 11 14:14:28 crc restorecon[4689]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 11 14:14:28 crc kubenswrapper[4690]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 11 14:14:28 crc kubenswrapper[4690]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 11 14:14:28 crc kubenswrapper[4690]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 11 14:14:28 crc kubenswrapper[4690]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 11 14:14:28 crc kubenswrapper[4690]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 11 14:14:28 crc kubenswrapper[4690]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.479718 4690 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483236 4690 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483259 4690 feature_gate.go:330] unrecognized feature gate: Example Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483264 4690 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483269 4690 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483273 4690 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483285 4690 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483291 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483296 4690 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483300 4690 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483303 4690 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483308 4690 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483312 4690 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483316 4690 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483320 4690 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483323 4690 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483326 4690 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483330 4690 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483333 4690 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483337 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483340 4690 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483344 4690 feature_gate.go:353] 
Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483349 4690 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483353 4690 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483357 4690 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483360 4690 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483364 4690 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483367 4690 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483371 4690 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483374 4690 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483378 4690 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483382 4690 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483386 4690 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483391 4690 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483402 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483407 4690 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483412 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483417 4690 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483421 4690 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483425 4690 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483430 4690 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483442 4690 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483447 4690 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483451 4690 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483455 4690 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483459 4690 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483463 4690 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 11 14:14:28 crc kubenswrapper[4690]: 
W1211 14:14:28.483467 4690 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483471 4690 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483475 4690 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483479 4690 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483483 4690 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483487 4690 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483492 4690 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483497 4690 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483501 4690 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483505 4690 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483509 4690 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483513 4690 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483516 4690 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483520 4690 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483523 4690 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483527 4690 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483531 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483534 4690 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483537 4690 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483541 4690 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483544 4690 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483548 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483552 4690 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483557 4690 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
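Note: the kubenswrapper entries above follow the standard klog header (severity letter, MMDD, wall-clock time, PID, source file:line, message) wrapped in a journald prefix. Below is a minimal Python sketch for splitting one such entry into its fields; the regex and field names are my own, inferred only from the format visible in this log.

import re

# Hypothetical helper: split one journal line from this log into the klog
# fields the kubelet prints (severity, date, time, pid, file:line, message).
KLOG_RE = re.compile(
    r"(?P<sev>[IWEF])(?P<mmdd>\d{4}) (?P<time>\d{2}:\d{2}:\d{2}\.\d+)\s+"
    r"(?P<pid>\d+)\s+(?P<source>[\w./-]+:\d+)\] (?P<msg>.*)"
)

def parse_klog(line):
    m = KLOG_RE.search(line)
    return m.groupdict() if m else None

example = ("Dec 11 14:14:28 crc kubenswrapper[4690]: "
           "W1211 14:14:28.483259 4690 feature_gate.go:330] "
           "unrecognized feature gate: Example")
print(parse_klog(example))
# {'sev': 'W', 'mmdd': '1211', 'time': '14:14:28.483259', 'pid': '4690',
#  'source': 'feature_gate.go:330', 'msg': 'unrecognized feature gate: Example'}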
Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.483561 4690 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483651 4690 flags.go:64] FLAG: --address="0.0.0.0" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483660 4690 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483669 4690 flags.go:64] FLAG: --anonymous-auth="true" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483675 4690 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483680 4690 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483690 4690 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483700 4690 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483705 4690 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483710 4690 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483714 4690 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483718 4690 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483723 4690 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483727 4690 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483731 4690 flags.go:64] FLAG: --cgroup-root="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483735 4690 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483739 4690 flags.go:64] FLAG: --client-ca-file="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483743 4690 flags.go:64] FLAG: --cloud-config="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483746 4690 flags.go:64] FLAG: --cloud-provider="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483750 4690 flags.go:64] FLAG: --cluster-dns="[]" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483769 4690 flags.go:64] FLAG: --cluster-domain="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483774 4690 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483778 4690 flags.go:64] FLAG: --config-dir="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483782 4690 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483787 4690 flags.go:64] FLAG: --container-log-max-files="5" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483792 4690 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483796 4690 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483800 4690 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483804 4690 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 11 14:14:28 crc 
kubenswrapper[4690]: I1211 14:14:28.483808 4690 flags.go:64] FLAG: --contention-profiling="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483812 4690 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483816 4690 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483821 4690 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483824 4690 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483829 4690 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483833 4690 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483837 4690 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483842 4690 flags.go:64] FLAG: --enable-load-reader="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483846 4690 flags.go:64] FLAG: --enable-server="true" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483851 4690 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483859 4690 flags.go:64] FLAG: --event-burst="100" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483864 4690 flags.go:64] FLAG: --event-qps="50" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483876 4690 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483880 4690 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483884 4690 flags.go:64] FLAG: --eviction-hard="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483889 4690 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483893 4690 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483897 4690 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483901 4690 flags.go:64] FLAG: --eviction-soft="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483905 4690 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483909 4690 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483913 4690 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483917 4690 flags.go:64] FLAG: --experimental-mounter-path="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483921 4690 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483925 4690 flags.go:64] FLAG: --fail-swap-on="true" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483928 4690 flags.go:64] FLAG: --feature-gates="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483933 4690 flags.go:64] FLAG: --file-check-frequency="20s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483937 4690 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483942 4690 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 11 14:14:28 crc 
kubenswrapper[4690]: I1211 14:14:28.483946 4690 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483963 4690 flags.go:64] FLAG: --healthz-port="10248" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483969 4690 flags.go:64] FLAG: --help="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483973 4690 flags.go:64] FLAG: --hostname-override="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483977 4690 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483981 4690 flags.go:64] FLAG: --http-check-frequency="20s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483985 4690 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483989 4690 flags.go:64] FLAG: --image-credential-provider-config="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483992 4690 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.483996 4690 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484000 4690 flags.go:64] FLAG: --image-service-endpoint="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484004 4690 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484010 4690 flags.go:64] FLAG: --kube-api-burst="100" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484014 4690 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484018 4690 flags.go:64] FLAG: --kube-api-qps="50" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484022 4690 flags.go:64] FLAG: --kube-reserved="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484026 4690 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484030 4690 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484034 4690 flags.go:64] FLAG: --kubelet-cgroups="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484044 4690 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484048 4690 flags.go:64] FLAG: --lock-file="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484052 4690 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484056 4690 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484060 4690 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484067 4690 flags.go:64] FLAG: --log-json-split-stream="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484071 4690 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484075 4690 flags.go:64] FLAG: --log-text-split-stream="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484079 4690 flags.go:64] FLAG: --logging-format="text" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484084 4690 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484088 4690 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 11 14:14:28 crc 
kubenswrapper[4690]: I1211 14:14:28.484092 4690 flags.go:64] FLAG: --manifest-url="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484097 4690 flags.go:64] FLAG: --manifest-url-header="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484104 4690 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484109 4690 flags.go:64] FLAG: --max-open-files="1000000" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484116 4690 flags.go:64] FLAG: --max-pods="110" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484121 4690 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484126 4690 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484131 4690 flags.go:64] FLAG: --memory-manager-policy="None" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484135 4690 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484140 4690 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484145 4690 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484150 4690 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484163 4690 flags.go:64] FLAG: --node-status-max-images="50" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484169 4690 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484179 4690 flags.go:64] FLAG: --oom-score-adj="-999" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484184 4690 flags.go:64] FLAG: --pod-cidr="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484188 4690 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484198 4690 flags.go:64] FLAG: --pod-manifest-path="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484202 4690 flags.go:64] FLAG: --pod-max-pids="-1" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484207 4690 flags.go:64] FLAG: --pods-per-core="0" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484211 4690 flags.go:64] FLAG: --port="10250" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484215 4690 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484219 4690 flags.go:64] FLAG: --provider-id="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484223 4690 flags.go:64] FLAG: --qos-reserved="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484227 4690 flags.go:64] FLAG: --read-only-port="10255" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484236 4690 flags.go:64] FLAG: --register-node="true" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484241 4690 flags.go:64] FLAG: --register-schedulable="true" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484244 4690 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484255 4690 flags.go:64] FLAG: --registry-burst="10" Dec 11 14:14:28 crc 
kubenswrapper[4690]: I1211 14:14:28.484259 4690 flags.go:64] FLAG: --registry-qps="5" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484263 4690 flags.go:64] FLAG: --reserved-cpus="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484267 4690 flags.go:64] FLAG: --reserved-memory="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484272 4690 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484276 4690 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484281 4690 flags.go:64] FLAG: --rotate-certificates="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484284 4690 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484289 4690 flags.go:64] FLAG: --runonce="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484293 4690 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484297 4690 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484301 4690 flags.go:64] FLAG: --seccomp-default="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484305 4690 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484309 4690 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484313 4690 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484317 4690 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484321 4690 flags.go:64] FLAG: --storage-driver-password="root" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484325 4690 flags.go:64] FLAG: --storage-driver-secure="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484331 4690 flags.go:64] FLAG: --storage-driver-table="stats" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484335 4690 flags.go:64] FLAG: --storage-driver-user="root" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484339 4690 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484343 4690 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484347 4690 flags.go:64] FLAG: --system-cgroups="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484351 4690 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484357 4690 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484361 4690 flags.go:64] FLAG: --tls-cert-file="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484365 4690 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484373 4690 flags.go:64] FLAG: --tls-min-version="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484377 4690 flags.go:64] FLAG: --tls-private-key-file="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484381 4690 flags.go:64] FLAG: --topology-manager-policy="none" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484385 4690 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 11 14:14:28 crc kubenswrapper[4690]: 
I1211 14:14:28.484389 4690 flags.go:64] FLAG: --topology-manager-scope="container" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484393 4690 flags.go:64] FLAG: --v="2" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484404 4690 flags.go:64] FLAG: --version="false" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484410 4690 flags.go:64] FLAG: --vmodule="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484415 4690 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484419 4690 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484552 4690 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484560 4690 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484565 4690 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484569 4690 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484573 4690 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484577 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484581 4690 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484585 4690 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484588 4690 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484592 4690 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484597 4690 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
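Note: the flags.go:64 entries above dump every kubelet command-line flag as FLAG: --name="value". The following sketch, assuming each journal entry is available as its own string, collects that dump into a dict so individual settings (for example --config or --node-ip) can be looked up; the helper name is hypothetical and the pattern mirrors only the format shown here.

import re

# Hypothetical helper: gather the "FLAG: --name=value" dump into a dict.
FLAG_RE = re.compile(r'FLAG: --(?P<name>[\w-]+)="?(?P<value>[^"]*)"?$')

def collect_flags(lines):
    flags = {}
    for line in lines:
        m = FLAG_RE.search(line.rstrip())
        if m:
            flags[m.group("name")] = m.group("value")
    return flags

sample = [
    'I1211 14:14:28.483774 4690 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"',
    'I1211 14:14:28.484145 4690 flags.go:64] FLAG: --node-ip="192.168.126.11"',
]
print(collect_flags(sample))
# {'config': '/etc/kubernetes/kubelet.conf', 'node-ip': '192.168.126.11'}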
Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484601 4690 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484605 4690 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484612 4690 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484616 4690 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484620 4690 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484623 4690 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484627 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484631 4690 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484634 4690 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484639 4690 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484642 4690 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484647 4690 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484651 4690 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484655 4690 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484658 4690 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484662 4690 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484665 4690 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484668 4690 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484672 4690 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484675 4690 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484679 4690 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484688 4690 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484692 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484695 4690 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484698 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 11 14:14:28 crc 
kubenswrapper[4690]: W1211 14:14:28.484703 4690 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484706 4690 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484710 4690 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484713 4690 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484717 4690 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484721 4690 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484724 4690 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484727 4690 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484731 4690 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484736 4690 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484739 4690 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484743 4690 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484746 4690 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484749 4690 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484753 4690 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484757 4690 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484762 4690 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484766 4690 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484769 4690 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484773 4690 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484777 4690 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484781 4690 feature_gate.go:330] unrecognized feature gate: Example Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484784 4690 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484789 4690 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484793 4690 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484797 4690 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484801 4690 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484804 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484808 4690 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484811 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484814 4690 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484818 4690 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484827 4690 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484831 4690 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.484834 4690 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.484846 4690 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.494792 4690 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.494827 4690 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.494924 4690 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.494934 4690 feature_gate.go:330] unrecognized feature gate: Example Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.494939 4690 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.494945 4690 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.494967 4690 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.494972 4690 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.494977 4690 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.494982 4690 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.494986 4690 feature_gate.go:330] unrecognized feature gate: 
AWSEFSDriverVolumeMetrics Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.494990 4690 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.494995 4690 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495000 4690 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495006 4690 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495012 4690 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495019 4690 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495025 4690 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495031 4690 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495037 4690 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495042 4690 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495048 4690 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495053 4690 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495058 4690 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495062 4690 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495067 4690 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495072 4690 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495077 4690 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495081 4690 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495087 4690 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495095 4690 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495101 4690 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495106 4690 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495111 4690 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495116 4690 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495121 4690 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495126 4690 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495131 4690 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495136 4690 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495141 4690 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495145 4690 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495150 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495154 4690 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495159 4690 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495165 4690 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495171 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495176 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495181 4690 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495186 4690 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495190 4690 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495196 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495201 4690 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495207 4690 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495212 4690 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495217 4690 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495221 4690 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495225 4690 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495230 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495234 4690 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495239 4690 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495243 4690 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495248 4690 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495254 4690 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
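Note: the same set of OpenShift-side gate names is reported as "unrecognized feature gate" on every parsing pass, which is why the warnings above repeat. The sketch below collapses those repeats into one sorted list of unique names; purely illustrative, assuming entries are provided one per string.

# Hypothetical helper: deduplicate "unrecognized feature gate: <Name>" warnings.
def unknown_gates(lines):
    prefix = "unrecognized feature gate: "
    names = set()
    for line in lines:
        _, sep, rest = line.partition(prefix)
        if sep:
            names.add(rest.split()[0])
    return sorted(names)

sample = [
    "W1211 14:14:28.483236 4690 feature_gate.go:330] unrecognized feature gate: UpgradeStatus",
    "W1211 14:14:28.484581 4690 feature_gate.go:330] unrecognized feature gate: UpgradeStatus",
    "W1211 14:14:28.483259 4690 feature_gate.go:330] unrecognized feature gate: Example",
]
print(unknown_gates(sample))   # ['Example', 'UpgradeStatus']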
Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495260 4690 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495265 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495270 4690 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495274 4690 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495278 4690 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495282 4690 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495286 4690 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495291 4690 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495295 4690 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495299 4690 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.495307 4690 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495791 4690 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495801 4690 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495806 4690 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495811 4690 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495816 4690 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495821 4690 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495826 4690 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495831 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495836 4690 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495841 4690 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495847 4690 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495853 4690 feature_gate.go:330] unrecognized 
feature gate: BootcNodeManagement Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495858 4690 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495863 4690 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495869 4690 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495874 4690 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495880 4690 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495888 4690 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495893 4690 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495898 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495902 4690 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495906 4690 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495919 4690 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495923 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495927 4690 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495931 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495935 4690 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495939 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495943 4690 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495947 4690 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495969 4690 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495974 4690 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495979 4690 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495983 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495988 4690 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495992 4690 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.495997 4690 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 11 14:14:28 crc kubenswrapper[4690]: 
W1211 14:14:28.496002 4690 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496014 4690 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496018 4690 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496023 4690 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496027 4690 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496032 4690 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496037 4690 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496041 4690 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496046 4690 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496051 4690 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496055 4690 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496061 4690 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496066 4690 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496070 4690 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496075 4690 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496080 4690 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496084 4690 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496089 4690 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496093 4690 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496097 4690 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496102 4690 feature_gate.go:330] unrecognized feature gate: Example Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496107 4690 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496111 4690 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496115 4690 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496120 4690 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496124 4690 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 11 14:14:28 crc 
kubenswrapper[4690]: W1211 14:14:28.496129 4690 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496136 4690 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496142 4690 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496148 4690 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496152 4690 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496158 4690 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496164 4690 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.496169 4690 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.496177 4690 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.496558 4690 server.go:940] "Client rotation is on, will bootstrap in background" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.501043 4690 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.501607 4690 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
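Note: each pass ends with a feature_gate.go:386 summary of the effective gates printed in Go map notation, e.g. {map[CloudDualStackNodeIPs:true ... VolumeAttributesClass:false]}. The sketch below converts that summary into a Python dict of booleans; it relies only on the format shown in this log, and the function name is my own.

import re

# Hypothetical helper: parse the Go-style "feature gates: {map[...]}" summary.
def parse_gate_summary(entry):
    m = re.search(r"feature gates: \{map\[(?P<body>[^\]]*)\]\}", entry)
    if not m:
        return {}
    gates = {}
    for pair in m.group("body").split():
        name, _, value = pair.partition(":")
        gates[name] = value == "true"
    return gates

sample = ("I1211 14:14:28.496177 4690 feature_gate.go:386] feature gates: "
          "{map[CloudDualStackNodeIPs:true KMSv1:true NodeSwap:false]}")
print(parse_gate_summary(sample))
# {'CloudDualStackNodeIPs': True, 'KMSv1': True, 'NodeSwap': False}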
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.502345 4690 server.go:997] "Starting client certificate rotation" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.502442 4690 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.502864 4690 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-18 19:28:27.611451221 +0000 UTC Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.503004 4690 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 173h13m59.108450273s for next certificate rotation Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.507335 4690 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.510022 4690 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.519511 4690 log.go:25] "Validated CRI v1 runtime API" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.535201 4690 log.go:25] "Validated CRI v1 image API" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.537807 4690 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.540594 4690 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-11-14-09-46-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.540659 4690 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.561861 4690 manager.go:217] Machine: {Timestamp:2025-12-11 14:14:28.560204971 +0000 UTC m=+0.175606654 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:bfcace2b-fa81-434a-8ffc-a516d4bf3604 BootID:01107f05-03d2-4f59-b4f7-8fb3bdca6067 Filesystems:[{Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 
DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:a8:16:e2 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:a8:16:e2 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:f8:09:ac Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:27:59:ee Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:25:a9:9f Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:3b:3b:7b Speed:-1 Mtu:1496} {Name:eth10 MacAddress:a6:ed:57:c8:06:6e Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:8a:48:ab:30:12:b2 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 
BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.562209 4690 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.562417 4690 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.563134 4690 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.563377 4690 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.563435 4690 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.563726 4690 topology_manager.go:138] "Creating topology manager with none policy" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.563742 4690 container_manager_linux.go:303] "Creating device plugin manager" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.564045 4690 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.564088 4690 server.go:66] "Creating device plugin registration server" version="v1beta1" 
socket="/var/lib/kubelet/device-plugins/kubelet.sock" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.564439 4690 state_mem.go:36] "Initialized new in-memory state store" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.564572 4690 server.go:1245] "Using root directory" path="/var/lib/kubelet" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.565202 4690 kubelet.go:418] "Attempting to sync node with API server" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.565230 4690 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.565264 4690 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.565282 4690 kubelet.go:324] "Adding apiserver pod source" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.565302 4690 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.569394 4690 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.569900 4690 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.571549 4690 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.571543 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:14:28 crc kubenswrapper[4690]: E1211 14:14:28.571680 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.572253 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.572403 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.572427 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.572439 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.572462 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.572474 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.572488 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.572408 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: 
connection refused
Dec 11 14:14:28 crc kubenswrapper[4690]: E1211 14:14:28.572586 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.572512 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.572660 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.572675 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.572693 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.572704 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.574003 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.574916 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.574934 4690 server.go:1280] "Started kubelet"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.575782 4690 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.575785 4690 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.577527 4690 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 11 14:14:28 crc systemd[1]: Started Kubernetes Kubelet.
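[editor's note] The startup sequence above includes the container-manager NodeConfig printed at container_manager_linux.go:272, with the hard-eviction thresholds, SystemReserved values, and cgroup settings embedded as JSON in a single entry. A minimal sketch, assuming the same kubelet.log file, of recovering that embedded JSON for inspection; the helper name and printed fields are assumptions for illustration only.

import json

# The NodeConfig JSON follows this marker inside the log entry.
MARKER = "nodeConfig="

def extract_node_config(path="kubelet.log"):
    with open(path, encoding="utf-8", errors="replace") as f:
        for line in f:
            start = line.find(MARKER)
            if start == -1:
                continue
            start += len(MARKER)
            depth = 0
            # Walk the braces to find the end of the embedded JSON object,
            # since the rest of the entry follows it on the same line.
            for end, ch in enumerate(line[start:], start):
                if ch == "{":
                    depth += 1
                elif ch == "}":
                    depth -= 1
                    if depth == 0:
                        return json.loads(line[start:end + 1])
    return None

if __name__ == "__main__":
    cfg = extract_node_config()
    if cfg:
        print("cgroup driver:", cfg["CgroupDriver"])
        print("system reserved:", cfg["SystemReserved"])
        for t in cfg["HardEvictionThresholds"]:
            print(t["Signal"], t["Operator"], t["Value"])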
Dec 11 14:14:28 crc kubenswrapper[4690]: E1211 14:14:28.577779 4690 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.51:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.18802ec2e7b0d179 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 14:14:28.574892409 +0000 UTC m=+0.190294052,LastTimestamp:2025-12-11 14:14:28.574892409 +0000 UTC m=+0.190294052,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.578928 4690 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.578982 4690 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Dec 11 14:14:28 crc kubenswrapper[4690]: E1211 14:14:28.579295 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.579351 4690 volume_manager.go:287] "The desired_state_of_world populator starts" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.579368 4690 volume_manager.go:289] "Starting Kubelet Volume Manager" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.579516 4690 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Dec 11 14:14:28 crc kubenswrapper[4690]: E1211 14:14:28.579638 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="200ms" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.579482 4690 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 09:37:16.165306383 +0000 UTC Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.580325 4690 factory.go:55] Registering systemd factory Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.580350 4690 factory.go:221] Registration of the systemd container factory successfully Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.580375 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:14:28 crc kubenswrapper[4690]: E1211 14:14:28.580462 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.581200 4690 factory.go:153] Registering CRI-O factory Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.581226 4690 factory.go:221] Registration of the crio container factory successfully Dec 11 14:14:28 crc 
kubenswrapper[4690]: I1211 14:14:28.581300 4690 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.581341 4690 factory.go:103] Registering Raw factory Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.581368 4690 manager.go:1196] Started watching for new ooms in manager Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.581457 4690 server.go:460] "Adding debug handlers to kubelet server" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.582101 4690 manager.go:319] Starting recovery of all containers Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.590589 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.590730 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.590765 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.590782 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.590803 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.590831 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.590856 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.590872 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.590904 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.590922 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.590965 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.590986 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591010 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591032 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591054 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591113 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591135 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591154 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591177 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591205 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591221 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591240 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591259 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591281 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591303 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591325 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591349 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591373 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591648 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591666 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591687 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" 
volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591711 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591734 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591749 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591761 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591812 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591830 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591849 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591871 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591890 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591911 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591927 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591945 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.591994 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592030 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592052 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592066 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592082 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592107 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592122 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592145 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592163 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592208 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" 
volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592227 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592249 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592270 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592287 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592310 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592325 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592343 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592371 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592385 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592405 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592421 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592439 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592459 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592473 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592492 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592505 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592534 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592553 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592571 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592590 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592609 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592622 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592639 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592654 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592676 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592689 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592703 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592724 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592736 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592752 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592764 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592779 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592796 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592809 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592823 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592843 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592857 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592876 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592890 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592906 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592923 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592939 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.592982 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593001 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593011 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593030 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593103 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593184 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593222 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593244 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593267 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593316 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593340 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593366 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593394 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593413 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593438 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593460 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593479 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593503 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593520 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593542 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593558 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593576 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593589 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593601 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593615 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593627 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593640 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593654 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593665 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593678 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593689 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593740 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593751 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593762 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593778 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" 
volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593792 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593808 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593821 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593832 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593845 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.593859 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596268 4690 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596365 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596397 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596418 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596441 4690 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596460 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596479 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596496 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596524 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596549 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596598 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596616 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596632 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596648 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596665 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596680 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596698 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596713 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596729 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596744 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596758 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596773 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596790 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596805 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596821 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596838 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596860 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the 
actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596877 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596895 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596913 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596932 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.596977 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597003 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597022 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597044 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597064 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597080 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597096 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597113 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597130 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597147 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597163 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597178 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597196 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597213 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597231 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597249 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597267 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597287 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597305 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597322 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597335 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597347 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597359 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597370 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597380 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597392 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597405 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597417 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597429 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597441 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597453 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597468 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597479 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597492 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597502 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597514 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597527 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597537 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597547 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597561 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" 
volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597571 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597582 4690 reconstruct.go:97] "Volume reconstruction finished"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.597591 4690 reconciler.go:26] "Reconciler: start to sync state"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.603521 4690 manager.go:324] Recovery completed
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.619057 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.620617 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.620650 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.620660 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.621359 4690 cpu_manager.go:225] "Starting CPU manager" policy="none"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.621377 4690 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.621393 4690 state_mem.go:36] "Initialized new in-memory state store"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.627922 4690 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.629590 4690 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.629631 4690 status_manager.go:217] "Starting to sync pod status with apiserver"
Dec 11 14:14:28 crc kubenswrapper[4690]: I1211 14:14:28.629664 4690 kubelet.go:2335] "Starting kubelet main sync loop"
Dec 11 14:14:28 crc kubenswrapper[4690]: E1211 14:14:28.629710 4690 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]"
Dec 11 14:14:28 crc kubenswrapper[4690]: E1211 14:14:28.679839 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 11 14:14:28 crc kubenswrapper[4690]: E1211 14:14:28.730174 4690 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet"
Dec 11 14:14:28 crc kubenswrapper[4690]: W1211 14:14:28.744763 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused
Dec 11 14:14:28 crc kubenswrapper[4690]: E1211 14:14:28.744868 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError"
Dec 11 14:14:28 crc kubenswrapper[4690]: E1211 14:14:28.780319 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 11 14:14:28 crc kubenswrapper[4690]: E1211 14:14:28.780898 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="400ms"
Dec 11 14:14:28 crc kubenswrapper[4690]: E1211 14:14:28.880841 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 11 14:14:28 crc kubenswrapper[4690]: E1211 14:14:28.930700 4690 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet"
Dec 11 14:14:28 crc kubenswrapper[4690]: E1211 14:14:28.982528 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 11 14:14:29 crc kubenswrapper[4690]: E1211 14:14:29.083516 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 11 14:14:29 crc kubenswrapper[4690]: E1211 14:14:29.182608 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="800ms"
Dec 11 14:14:29 crc kubenswrapper[4690]: E1211 14:14:29.184668 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 11 14:14:29 crc kubenswrapper[4690]: E1211 14:14:29.285068 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 11 14:14:29 crc kubenswrapper[4690]: E1211 14:14:29.331768 4690 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.372671 4690 policy_none.go:49] "None policy: Start"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.373577 4690 memory_manager.go:170] "Starting memorymanager" policy="None"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.373611 4690 state_mem.go:35] "Initializing new in-memory state store"
Dec 11 14:14:29 crc kubenswrapper[4690]: E1211 14:14:29.385606 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 11 14:14:29 crc kubenswrapper[4690]: E1211 14:14:29.486775 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 11 14:14:29 crc kubenswrapper[4690]: W1211 14:14:29.576092 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused
Dec 11 14:14:29 crc kubenswrapper[4690]: E1211 14:14:29.576193 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.576265 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused
Dec 11 14:14:29 crc kubenswrapper[4690]: W1211 14:14:29.577712 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused
Dec 11 14:14:29 crc kubenswrapper[4690]: E1211 14:14:29.577841 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.580219 4690 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 11:14:37.0154957 +0000 UTC
Dec 11 14:14:29 crc kubenswrapper[4690]: E1211 14:14:29.587457 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 11 14:14:29 crc kubenswrapper[4690]: E1211 14:14:29.688345 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 11 14:14:29 crc kubenswrapper[4690]: E1211 14:14:29.788598 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.822024 4690 manager.go:334] "Starting Device Plugin manager"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.822704 4690 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.822736 4690 server.go:79] "Starting device plugin registration server"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.823215 4690 eviction_manager.go:189] "Eviction manager: starting control loop"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.823246 4690 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.823742 4690 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.823840 4690 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.823854 4690 plugin_manager.go:118] "Starting Kubelet Plugin Manager"
Dec 11 14:14:29 crc kubenswrapper[4690]: E1211 14:14:29.830548 4690 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Dec 11 14:14:29 crc kubenswrapper[4690]: W1211 14:14:29.917112 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused
Dec 11 14:14:29 crc kubenswrapper[4690]: E1211 14:14:29.917175 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.923548 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.924933 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.925092 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.925161 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 14:14:29 crc kubenswrapper[4690]: I1211 14:14:29.925250 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 11 14:14:29 crc kubenswrapper[4690]: E1211 14:14:29.925849 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.51:6443: connect: connection refused" node="crc"
Dec 11 14:14:29 crc kubenswrapper[4690]: E1211 14:14:29.983692 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="1.6s"
Dec 11 14:14:30 crc kubenswrapper[4690]: W1211 14:14:30.061683 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused
Dec 11 14:14:30 crc kubenswrapper[4690]: E1211 14:14:30.061829 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.126765 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.128643 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.128700 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.128716 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.128755 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: E1211 14:14:30.129353 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.51:6443: connect: connection refused" node="crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.132492 4690 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"]
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.132603 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.133593 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.133701 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.133790 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.134081 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.134577 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.134642 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.135847 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.135923 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.135995 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.135998 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.136766 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.137104 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.137178 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.137195 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.137243 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.137822 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.137886 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.137903 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.139764 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.140238 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.140302 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.140381 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.140407 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.140446 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.141753 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.141787 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.141798 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.142097 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.142207 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.142264 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.142371 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.142416 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.142443 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.142830 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.142883 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.142898 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.143071 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.143100 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.143113 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.143162 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.143193 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.143993 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.144018 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.144033 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.219642 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.219696 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.219828 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.219925 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.220013 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.220062 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.220437 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.220500 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.220546 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.220581 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.220607 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.220661 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.220719 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.220765 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.220803 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.322849 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.322969 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323028 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323080 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323181 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323259 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323294 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323301 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323195 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323320 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323318 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323389 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323429 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323444 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323418 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323487 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323519 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323518 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323545 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323583 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323587 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323619 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323640 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323664 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323692 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323716 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323715 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323784 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323804 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.323819 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.468887 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.478991 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.493682 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: W1211 14:14:30.496673 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-9f88fbedbeee1710e8972902122e66cab16ff94dd87707cde55589ce3e9d16db WatchSource:0}: Error finding container 9f88fbedbeee1710e8972902122e66cab16ff94dd87707cde55589ce3e9d16db: Status 404 returned error can't find the container with id 9f88fbedbeee1710e8972902122e66cab16ff94dd87707cde55589ce3e9d16db Dec 11 14:14:30 crc kubenswrapper[4690]: W1211 14:14:30.498821 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-7d6c342c943937e7f2122c27efe867268f0306ddc3758eef4de37f99de3037a3 WatchSource:0}: Error finding container 7d6c342c943937e7f2122c27efe867268f0306ddc3758eef4de37f99de3037a3: Status 404 returned error can't find the container with id 7d6c342c943937e7f2122c27efe867268f0306ddc3758eef4de37f99de3037a3 Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.515329 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.520514 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.530430 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.531857 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.531915 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.531926 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.531977 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 14:14:30 crc kubenswrapper[4690]: E1211 14:14:30.532456 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.51:6443: connect: connection refused" node="crc" Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.576509 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.580707 4690 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 21:10:55.567422783 +0000 UTC Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.637352 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" 
event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"9f88fbedbeee1710e8972902122e66cab16ff94dd87707cde55589ce3e9d16db"} Dec 11 14:14:30 crc kubenswrapper[4690]: I1211 14:14:30.638311 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7d6c342c943937e7f2122c27efe867268f0306ddc3758eef4de37f99de3037a3"} Dec 11 14:14:30 crc kubenswrapper[4690]: W1211 14:14:30.789252 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-e4d1bd16f74dbd6bce3cf742086051c916c8b7c19d7600ce074d8e0d51d97676 WatchSource:0}: Error finding container e4d1bd16f74dbd6bce3cf742086051c916c8b7c19d7600ce074d8e0d51d97676: Status 404 returned error can't find the container with id e4d1bd16f74dbd6bce3cf742086051c916c8b7c19d7600ce074d8e0d51d97676 Dec 11 14:14:30 crc kubenswrapper[4690]: W1211 14:14:30.797612 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-a7c11807aab25dbca71170dab8f8553d06972d69fcbaca09d304504f787a6e83 WatchSource:0}: Error finding container a7c11807aab25dbca71170dab8f8553d06972d69fcbaca09d304504f787a6e83: Status 404 returned error can't find the container with id a7c11807aab25dbca71170dab8f8553d06972d69fcbaca09d304504f787a6e83 Dec 11 14:14:30 crc kubenswrapper[4690]: W1211 14:14:30.799547 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-b66108ddbf0c1f6460734b3a58fd723454b07058c1bec7f9ab2b34c3d045d936 WatchSource:0}: Error finding container b66108ddbf0c1f6460734b3a58fd723454b07058c1bec7f9ab2b34c3d045d936: Status 404 returned error can't find the container with id b66108ddbf0c1f6460734b3a58fd723454b07058c1bec7f9ab2b34c3d045d936 Dec 11 14:14:31 crc kubenswrapper[4690]: I1211 14:14:31.333325 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:31 crc kubenswrapper[4690]: I1211 14:14:31.334926 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:31 crc kubenswrapper[4690]: I1211 14:14:31.335018 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:31 crc kubenswrapper[4690]: I1211 14:14:31.335031 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:31 crc kubenswrapper[4690]: I1211 14:14:31.335103 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 14:14:31 crc kubenswrapper[4690]: E1211 14:14:31.335983 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.51:6443: connect: connection refused" node="crc" Dec 11 14:14:31 crc kubenswrapper[4690]: I1211 14:14:31.576585 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:14:31 crc kubenswrapper[4690]: I1211 14:14:31.581755 4690 certificate_manager.go:356] 
kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 08:05:07.1536407 +0000 UTC Dec 11 14:14:31 crc kubenswrapper[4690]: E1211 14:14:31.584534 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="3.2s" Dec 11 14:14:31 crc kubenswrapper[4690]: I1211 14:14:31.641990 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b66108ddbf0c1f6460734b3a58fd723454b07058c1bec7f9ab2b34c3d045d936"} Dec 11 14:14:31 crc kubenswrapper[4690]: I1211 14:14:31.643197 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a7c11807aab25dbca71170dab8f8553d06972d69fcbaca09d304504f787a6e83"} Dec 11 14:14:31 crc kubenswrapper[4690]: I1211 14:14:31.644242 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e4d1bd16f74dbd6bce3cf742086051c916c8b7c19d7600ce074d8e0d51d97676"} Dec 11 14:14:31 crc kubenswrapper[4690]: W1211 14:14:31.919748 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:14:31 crc kubenswrapper[4690]: E1211 14:14:31.919827 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError" Dec 11 14:14:32 crc kubenswrapper[4690]: W1211 14:14:32.193461 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:14:32 crc kubenswrapper[4690]: E1211 14:14:32.193565 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError" Dec 11 14:14:32 crc kubenswrapper[4690]: I1211 14:14:32.575822 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:14:32 crc kubenswrapper[4690]: I1211 14:14:32.582192 4690 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 15:13:17.777571517 +0000 UTC Dec 11 14:14:32 crc kubenswrapper[4690]: W1211 14:14:32.621747 4690 
reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:14:32 crc kubenswrapper[4690]: E1211 14:14:32.621917 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError" Dec 11 14:14:32 crc kubenswrapper[4690]: I1211 14:14:32.650372 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"a7b41c7e6602fc276b366a21fe2510cadba12ee11ae6ef468b4a9df87d5ae633"} Dec 11 14:14:32 crc kubenswrapper[4690]: I1211 14:14:32.652460 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6905bfef4007acd368e6ba2e53e2eb4236b1f05b4e642b5ed5991abe0fe3ac05"} Dec 11 14:14:32 crc kubenswrapper[4690]: I1211 14:14:32.936992 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:32 crc kubenswrapper[4690]: I1211 14:14:32.940150 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:32 crc kubenswrapper[4690]: I1211 14:14:32.940203 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:32 crc kubenswrapper[4690]: I1211 14:14:32.940218 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:32 crc kubenswrapper[4690]: I1211 14:14:32.940250 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 14:14:32 crc kubenswrapper[4690]: E1211 14:14:32.940919 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.51:6443: connect: connection refused" node="crc" Dec 11 14:14:32 crc kubenswrapper[4690]: W1211 14:14:32.983854 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:14:32 crc kubenswrapper[4690]: E1211 14:14:32.983997 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.51:6443: connect: connection refused" logger="UnhandledError" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.575775 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.583106 4690 certificate_manager.go:356] kubernetes.io/kubelet-serving: 
Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 21:11:05.647424693 +0000 UTC Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.656007 4690 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="dd0066d0af2e765cdbb857b3f2fb78a15eccf76a75b45b5a7cd87f3191521d64" exitCode=0 Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.656098 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"dd0066d0af2e765cdbb857b3f2fb78a15eccf76a75b45b5a7cd87f3191521d64"} Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.656112 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.657054 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.657102 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.657114 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.657452 4690 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="a7b41c7e6602fc276b366a21fe2510cadba12ee11ae6ef468b4a9df87d5ae633" exitCode=0 Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.657530 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"a7b41c7e6602fc276b366a21fe2510cadba12ee11ae6ef468b4a9df87d5ae633"} Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.657622 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.658743 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.658768 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.658779 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.659810 4690 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a0d55fe16fad52ea58196b3e2c6f392cf5390b0ab0ec231dba2bedae56faac19" exitCode=0 Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.659864 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"a0d55fe16fad52ea58196b3e2c6f392cf5390b0ab0ec231dba2bedae56faac19"} Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.659935 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.660586 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:33 
crc kubenswrapper[4690]: I1211 14:14:33.660613 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.660621 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.662310 4690 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="cedf619c63b8a5f7c6170d65952df460cb6fa85ea2cc124dc94567960d2565f2" exitCode=0 Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.662402 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.662403 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"cedf619c63b8a5f7c6170d65952df460cb6fa85ea2cc124dc94567960d2565f2"} Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.663112 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.663142 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.663151 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.664060 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.664668 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.664694 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:33 crc kubenswrapper[4690]: I1211 14:14:33.664704 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:34 crc kubenswrapper[4690]: I1211 14:14:34.576734 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:14:34 crc kubenswrapper[4690]: I1211 14:14:34.584203 4690 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 09:20:26.736592996 +0000 UTC Dec 11 14:14:34 crc kubenswrapper[4690]: I1211 14:14:34.584283 4690 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 523h5m52.152312972s for next certificate rotation Dec 11 14:14:34 crc kubenswrapper[4690]: I1211 14:14:34.667618 4690 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="2bd3ff41d42b9d448d136678f5e34ca9b88b160d3fb7cc13db2e4c66e931d181" exitCode=0 Dec 11 14:14:34 crc kubenswrapper[4690]: I1211 14:14:34.667682 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"2bd3ff41d42b9d448d136678f5e34ca9b88b160d3fb7cc13db2e4c66e931d181"} Dec 11 14:14:34 crc kubenswrapper[4690]: 
I1211 14:14:34.669688 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"b50735f3c66d2fec4db870c89c05e5ed916a84d9cc48dfffa53c8437e8a21ce8"} Dec 11 14:14:34 crc kubenswrapper[4690]: E1211 14:14:34.785500 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="6.4s" Dec 11 14:14:34 crc kubenswrapper[4690]: I1211 14:14:34.904604 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9883bd8ccac8e812372d6d0d493c68bc6600944f7d7607a6563f066d5720b36c"} Dec 11 14:14:34 crc kubenswrapper[4690]: I1211 14:14:34.908184 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"dc00d49fdf1f171f74307bad9caa2196d86629e293b9a663011216f585e04711"} Dec 11 14:14:34 crc kubenswrapper[4690]: I1211 14:14:34.912369 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9db92bcfe7099e9a716af1be228bcdb626b2b2383c72901ebbd9ea1c1aceb5a0"} Dec 11 14:14:35 crc kubenswrapper[4690]: I1211 14:14:35.918466 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9f48d51c71cbf9c8c35f0ab9f1a4bc92a9813b3be5bb2da2345cbce88a241e69"} Dec 11 14:14:35 crc kubenswrapper[4690]: I1211 14:14:35.932064 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"c2fffc442f66573f7c08dfd66617674ed24f9ebc5ec590023f7fe970de8a3d2d"} Dec 11 14:14:35 crc kubenswrapper[4690]: I1211 14:14:35.938531 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:35 crc kubenswrapper[4690]: I1211 14:14:35.939021 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"48702dab61bba8195ae012efbdf07816dddea37d296c326397f4fcbdfe766e3e"} Dec 11 14:14:35 crc kubenswrapper[4690]: I1211 14:14:35.939098 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:35 crc kubenswrapper[4690]: I1211 14:14:35.939800 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:35 crc kubenswrapper[4690]: I1211 14:14:35.939821 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:35 crc kubenswrapper[4690]: I1211 14:14:35.939828 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:35 crc kubenswrapper[4690]: I1211 14:14:35.939899 4690 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:35 crc kubenswrapper[4690]: I1211 14:14:35.939935 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:35 crc kubenswrapper[4690]: I1211 14:14:35.939946 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:36 crc kubenswrapper[4690]: I1211 14:14:36.141305 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:36 crc kubenswrapper[4690]: I1211 14:14:36.144551 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:36 crc kubenswrapper[4690]: I1211 14:14:36.144608 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:36 crc kubenswrapper[4690]: I1211 14:14:36.144627 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:36 crc kubenswrapper[4690]: I1211 14:14:36.144654 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.947781 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"a9615188d32950a37981ff1d2404296744222ce502221df93893d3cbdafdb9c4"} Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.947818 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.947830 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ad35e4e1b19e2eb60eaf01f14cac102b2923d73b6554b5313f9b03341bdbe2b0"} Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.947846 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d35a4b3df00b428cbc8e2c97e31b53f1c712b14e5675c5dc1de3008e35d13f74"} Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.948650 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.948685 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.948695 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.952391 4690 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="93c5772a753ceb587c98116cec5abdeb3f9c58ccdefee541c187fe774ca908c9" exitCode=0 Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.952477 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"93c5772a753ceb587c98116cec5abdeb3f9c58ccdefee541c187fe774ca908c9"} Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.952634 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 
14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.953677 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.953765 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.953784 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.954841 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"9b4053354be207c8debe12c89ac2c0e0b4c5915a93336521e26695819f139a29"} Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.954933 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.955570 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.955596 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.955608 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.957486 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"65b44d7027fafa76c4101ec5f465ed92127a334304183be99142a12b1465b489"} Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.957566 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.958167 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.958193 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:37 crc kubenswrapper[4690]: I1211 14:14:37.958204 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:38 crc kubenswrapper[4690]: I1211 14:14:38.287266 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:14:38 crc kubenswrapper[4690]: I1211 14:14:38.962006 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"03ffd1e3330e84634ca0bdf799a790e06a37221dfad97303f8d87fc20490c05c"} Dec 11 14:14:38 crc kubenswrapper[4690]: I1211 14:14:38.962089 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:38 crc kubenswrapper[4690]: I1211 14:14:38.962134 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:38 crc kubenswrapper[4690]: I1211 14:14:38.962195 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:38 crc kubenswrapper[4690]: I1211 
14:14:38.962257 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:14:38 crc kubenswrapper[4690]: I1211 14:14:38.962324 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 14:14:38 crc kubenswrapper[4690]: I1211 14:14:38.963056 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:38 crc kubenswrapper[4690]: I1211 14:14:38.963085 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:38 crc kubenswrapper[4690]: I1211 14:14:38.963130 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:38 crc kubenswrapper[4690]: I1211 14:14:38.963148 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:38 crc kubenswrapper[4690]: I1211 14:14:38.963100 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:38 crc kubenswrapper[4690]: I1211 14:14:38.963187 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:38 crc kubenswrapper[4690]: I1211 14:14:38.964241 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:38 crc kubenswrapper[4690]: I1211 14:14:38.964277 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:38 crc kubenswrapper[4690]: I1211 14:14:38.964289 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:39 crc kubenswrapper[4690]: I1211 14:14:39.558270 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:14:39 crc kubenswrapper[4690]: E1211 14:14:39.831712 4690 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 11 14:14:39 crc kubenswrapper[4690]: I1211 14:14:39.966876 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"86e3b2ab0f496ac74dbb345041994fce60663cc0fb93fda022eca2aa0972dd32"} Dec 11 14:14:39 crc kubenswrapper[4690]: I1211 14:14:39.966947 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:39 crc kubenswrapper[4690]: I1211 14:14:39.966983 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:39 crc kubenswrapper[4690]: I1211 14:14:39.968117 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:39 crc kubenswrapper[4690]: I1211 14:14:39.968146 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:39 crc kubenswrapper[4690]: I1211 14:14:39.968156 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:39 crc kubenswrapper[4690]: I1211 14:14:39.968159 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 11 14:14:39 crc kubenswrapper[4690]: I1211 14:14:39.968174 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:39 crc kubenswrapper[4690]: I1211 14:14:39.968207 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:40 crc kubenswrapper[4690]: I1211 14:14:40.245300 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 14:14:40 crc kubenswrapper[4690]: I1211 14:14:40.245484 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:40 crc kubenswrapper[4690]: I1211 14:14:40.246883 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:40 crc kubenswrapper[4690]: I1211 14:14:40.246919 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:40 crc kubenswrapper[4690]: I1211 14:14:40.246931 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:40 crc kubenswrapper[4690]: I1211 14:14:40.745485 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 14:14:40 crc kubenswrapper[4690]: I1211 14:14:40.973815 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"83da908cef65c3e26b0269ab075745e82985d674b9b2d3a66dc33c578e1af1f2"} Dec 11 14:14:40 crc kubenswrapper[4690]: I1211 14:14:40.973899 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:40 crc kubenswrapper[4690]: I1211 14:14:40.973989 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:40 crc kubenswrapper[4690]: I1211 14:14:40.975004 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:40 crc kubenswrapper[4690]: I1211 14:14:40.975057 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:40 crc kubenswrapper[4690]: I1211 14:14:40.975068 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:40 crc kubenswrapper[4690]: I1211 14:14:40.975299 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:40 crc kubenswrapper[4690]: I1211 14:14:40.975330 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:40 crc kubenswrapper[4690]: I1211 14:14:40.975342 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:41 crc kubenswrapper[4690]: I1211 14:14:41.979828 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b9f01a91a59925847cbdcfc5b1f3297c8c3750af1ac563f54680f5a46ec4b4a7"} Dec 11 14:14:42 crc kubenswrapper[4690]: I1211 14:14:42.892833 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 14:14:42 crc kubenswrapper[4690]: I1211 14:14:42.893118 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:42 crc kubenswrapper[4690]: I1211 14:14:42.894320 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:42 crc kubenswrapper[4690]: I1211 14:14:42.894386 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:42 crc kubenswrapper[4690]: I1211 14:14:42.894404 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:42 crc kubenswrapper[4690]: I1211 14:14:42.987121 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"2649d6d12d5cbbd582dbe2fc993dec24d079fd047311b46f8c85d832885bbae4"} Dec 11 14:14:42 crc kubenswrapper[4690]: I1211 14:14:42.987312 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:42 crc kubenswrapper[4690]: I1211 14:14:42.988221 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:42 crc kubenswrapper[4690]: I1211 14:14:42.988282 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:42 crc kubenswrapper[4690]: I1211 14:14:42.988297 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:43 crc kubenswrapper[4690]: I1211 14:14:43.989511 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:43 crc kubenswrapper[4690]: I1211 14:14:43.990310 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:43 crc kubenswrapper[4690]: I1211 14:14:43.990347 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:43 crc kubenswrapper[4690]: I1211 14:14:43.990358 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:44 crc kubenswrapper[4690]: I1211 14:14:44.451491 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 11 14:14:44 crc kubenswrapper[4690]: I1211 14:14:44.991757 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:44 crc kubenswrapper[4690]: I1211 14:14:44.992475 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:44 crc kubenswrapper[4690]: I1211 14:14:44.992513 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:44 crc kubenswrapper[4690]: I1211 14:14:44.992523 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:45 crc kubenswrapper[4690]: I1211 14:14:45.576533 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 11 14:14:45 crc 
kubenswrapper[4690]: I1211 14:14:45.893635 4690 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 11 14:14:45 crc kubenswrapper[4690]: I1211 14:14:45.893736 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 11 14:14:46 crc kubenswrapper[4690]: E1211 14:14:46.147008 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 11 14:14:46 crc kubenswrapper[4690]: W1211 14:14:46.191574 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 11 14:14:46 crc kubenswrapper[4690]: I1211 14:14:46.191683 4690 trace.go:236] Trace[1252287684]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 14:14:36.190) (total time: 10001ms): Dec 11 14:14:46 crc kubenswrapper[4690]: Trace[1252287684]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (14:14:46.191) Dec 11 14:14:46 crc kubenswrapper[4690]: Trace[1252287684]: [10.001568307s] [10.001568307s] END Dec 11 14:14:46 crc kubenswrapper[4690]: E1211 14:14:46.191710 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 11 14:14:46 crc kubenswrapper[4690]: E1211 14:14:46.298780 4690 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": net/http: TLS handshake timeout" event="&Event{ObjectMeta:{crc.18802ec2e7b0d179 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 14:14:28.574892409 +0000 UTC m=+0.190294052,LastTimestamp:2025-12-11 14:14:28.574892409 +0000 UTC m=+0.190294052,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 11 14:14:46 crc kubenswrapper[4690]: W1211 14:14:46.842400 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 11 14:14:46 crc kubenswrapper[4690]: I1211 14:14:46.842492 4690 
trace.go:236] Trace[1497373298]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 14:14:36.840) (total time: 10001ms): Dec 11 14:14:46 crc kubenswrapper[4690]: Trace[1497373298]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (14:14:46.842) Dec 11 14:14:46 crc kubenswrapper[4690]: Trace[1497373298]: [10.001554216s] [10.001554216s] END Dec 11 14:14:46 crc kubenswrapper[4690]: E1211 14:14:46.842513 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 11 14:14:47 crc kubenswrapper[4690]: I1211 14:14:47.800197 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 14:14:47 crc kubenswrapper[4690]: I1211 14:14:47.800431 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:47 crc kubenswrapper[4690]: I1211 14:14:47.801802 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:47 crc kubenswrapper[4690]: I1211 14:14:47.801851 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:47 crc kubenswrapper[4690]: I1211 14:14:47.801875 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:47 crc kubenswrapper[4690]: W1211 14:14:47.863577 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 11 14:14:47 crc kubenswrapper[4690]: I1211 14:14:47.863697 4690 trace.go:236] Trace[555088208]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 14:14:37.862) (total time: 10001ms): Dec 11 14:14:47 crc kubenswrapper[4690]: Trace[555088208]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (14:14:47.863) Dec 11 14:14:47 crc kubenswrapper[4690]: Trace[555088208]: [10.001366043s] [10.001366043s] END Dec 11 14:14:47 crc kubenswrapper[4690]: E1211 14:14:47.863731 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 11 14:14:47 crc kubenswrapper[4690]: I1211 14:14:47.895193 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 14:14:48 crc kubenswrapper[4690]: I1211 14:14:48.002161 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:48 crc kubenswrapper[4690]: I1211 14:14:48.004583 4690 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:48 crc kubenswrapper[4690]: I1211 14:14:48.004642 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:48 crc kubenswrapper[4690]: I1211 14:14:48.004656 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:48 crc kubenswrapper[4690]: I1211 14:14:48.007055 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 14:14:48 crc kubenswrapper[4690]: I1211 14:14:48.061276 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 11 14:14:48 crc kubenswrapper[4690]: I1211 14:14:48.061632 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:48 crc kubenswrapper[4690]: I1211 14:14:48.063261 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:48 crc kubenswrapper[4690]: I1211 14:14:48.063325 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:48 crc kubenswrapper[4690]: I1211 14:14:48.063343 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:49 crc kubenswrapper[4690]: I1211 14:14:49.005165 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:49 crc kubenswrapper[4690]: I1211 14:14:49.006353 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:49 crc kubenswrapper[4690]: I1211 14:14:49.006395 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:49 crc kubenswrapper[4690]: I1211 14:14:49.006422 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:49 crc kubenswrapper[4690]: W1211 14:14:49.133830 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 11 14:14:49 crc kubenswrapper[4690]: I1211 14:14:49.133935 4690 trace.go:236] Trace[1508865248]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (11-Dec-2025 14:14:39.132) (total time: 10001ms): Dec 11 14:14:49 crc kubenswrapper[4690]: Trace[1508865248]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (14:14:49.133) Dec 11 14:14:49 crc kubenswrapper[4690]: Trace[1508865248]: [10.001750926s] [10.001750926s] END Dec 11 14:14:49 crc kubenswrapper[4690]: E1211 14:14:49.134002 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 11 14:14:49 crc kubenswrapper[4690]: I1211 14:14:49.558773 4690 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe 
status=failure output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded" start-of-body= Dec 11 14:14:49 crc kubenswrapper[4690]: I1211 14:14:49.558914 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": context deadline exceeded" Dec 11 14:14:49 crc kubenswrapper[4690]: E1211 14:14:49.831981 4690 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 11 14:14:51 crc kubenswrapper[4690]: E1211 14:14:51.186750 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="7s" Dec 11 14:14:52 crc kubenswrapper[4690]: I1211 14:14:52.548029 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:52 crc kubenswrapper[4690]: I1211 14:14:52.549418 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:52 crc kubenswrapper[4690]: I1211 14:14:52.549491 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:52 crc kubenswrapper[4690]: I1211 14:14:52.549504 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:52 crc kubenswrapper[4690]: I1211 14:14:52.549535 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 14:14:53 crc kubenswrapper[4690]: I1211 14:14:53.130443 4690 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 11 14:14:53 crc kubenswrapper[4690]: I1211 14:14:53.130507 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 11 14:14:54 crc kubenswrapper[4690]: I1211 14:14:54.563521 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:14:54 crc kubenswrapper[4690]: I1211 14:14:54.563802 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:54 crc kubenswrapper[4690]: I1211 14:14:54.565469 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:54 crc kubenswrapper[4690]: I1211 14:14:54.565521 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:54 crc kubenswrapper[4690]: I1211 14:14:54.565534 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:54 crc kubenswrapper[4690]: I1211 14:14:54.567513 4690 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.017903 4690 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.017973 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.018788 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.018813 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.018822 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.537195 4690 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.578611 4690 apiserver.go:52] "Watching apiserver" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.581783 4690 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.582138 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"] Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.582566 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.582684 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.582580 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.582743 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 14:14:55 crc kubenswrapper[4690]: E1211 14:14:55.582814 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.582840 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 14:14:55 crc kubenswrapper[4690]: E1211 14:14:55.582997 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.583119 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:14:55 crc kubenswrapper[4690]: E1211 14:14:55.583178 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.584920 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.586994 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.587054 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.587097 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.587165 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.587175 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.587097 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.587103 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.587759 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.606699 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.618680 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.628252 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.638057 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.645819 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.655088 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.667686 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.677539 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.680019 4690 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.892854 4690 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 11 14:14:55 crc kubenswrapper[4690]: I1211 14:14:55.892933 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 11 14:14:56 crc kubenswrapper[4690]: I1211 14:14:56.211126 4690 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 11 14:14:56 crc kubenswrapper[4690]: I1211 14:14:56.630363 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:14:56 crc kubenswrapper[4690]: E1211 14:14:56.631148 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 14:14:57 crc kubenswrapper[4690]: I1211 14:14:57.326428 4690 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 11 14:14:57 crc kubenswrapper[4690]: I1211 14:14:57.630827 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:14:57 crc kubenswrapper[4690]: I1211 14:14:57.630861 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:14:57 crc kubenswrapper[4690]: E1211 14:14:57.630996 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 14:14:57 crc kubenswrapper[4690]: E1211 14:14:57.631094 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.084722 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.096014 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.099908 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.102968 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.110283 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.119364 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.129631 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.132792 4690 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.133264 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.142411 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.149213 4690 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38696->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.149247 4690 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38702->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.149292 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38696->192.168.126.11:17697: read: connection reset by peer" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.149312 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:38702->192.168.126.11:17697: read: connection reset by peer" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.149610 4690 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.149634 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.154594 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.167686 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.176797 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.185464 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.193457 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.203661 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.213252 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.230068 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"287b4f74-6337-4b0a-8b88-d2a7b5dbe870\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86e3b2ab0f496ac74dbb345041994fce60663cc0fb93fda022eca2aa0972dd32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da908cef65c3e26b0269ab075745e82985d674b9b2d3a66dc33c578e1af1f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-12-11T14:14:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9f01a91a59925847cbdcfc5b1f3297c8c3750af1ac563f54680f5a46ec4b4a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2649d6d12d5cbbd582dbe2fc993dec24d079fd047311b46f8c85d832885bbae4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ffd1e3330e84634ca0bdf799a790e06a37221dfad97303f8d87fc20490c05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cedf619c63b8a5f7c6170d65952df460cb6fa85ea2cc124dc94567960d2565f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cedf619c63b8a5f7c6170d65952df460cb6fa85ea2cc124dc94567960d25
65f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T14:14:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bd3ff41d42b9d448d136678f5e34ca9b88b160d3fb7cc13db2e4c66e931d181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bd3ff41d42b9d448d136678f5e34ca9b88b160d3fb7cc13db2e4c66e931d181\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T14:14:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:34Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://93c5772a753ceb587c98116cec5abdeb3f9c58ccdefee541c187fe774ca908c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93c5772a753ceb587c98116cec5abdeb3f9c58ccdefee541c187fe774ca908c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T14:14:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T14:14:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233126 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233160 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233181 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") 
pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233199 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233227 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233244 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233259 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233274 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233296 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233318 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233334 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233348 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233365 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233429 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233448 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233463 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233481 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233500 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233518 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233505 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233534 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233567 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). 
InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233612 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233642 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233670 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233693 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233693 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233713 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233722 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233812 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233837 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233855 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233873 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233798 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233907 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233915 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.233890 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234026 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234061 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234068 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234098 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234129 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234161 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234190 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234221 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234237 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234247 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234282 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234308 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234332 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234358 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234367 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234386 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234414 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234440 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234464 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234494 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234516 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234550 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234576 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234617 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234663 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234695 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234725 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234718 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234814 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234750 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234752 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.234982 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235007 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235030 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235060 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235087 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235139 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235169 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235195 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235217 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235215 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235241 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235248 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235263 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235291 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235313 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235342 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235371 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235415 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235440 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235461 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.235481 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236536 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: 
\"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236562 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236581 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236602 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236653 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236672 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236692 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236707 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236724 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236743 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236758 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod 
\"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236760 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236860 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236882 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236903 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236919 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236935 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236968 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.236985 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237009 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237001 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237033 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237121 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237161 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237189 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237219 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237278 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237244 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237315 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237431 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237442 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237477 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237519 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237523 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237543 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237583 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237600 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237631 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237663 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237688 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237689 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237714 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237753 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237810 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237826 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237854 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237892 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237927 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.237895 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238004 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238054 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238061 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238100 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238152 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238163 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238197 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238217 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238252 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238302 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238352 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238394 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238406 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238531 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238584 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238633 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238647 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238686 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238738 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238790 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238840 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238897 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.238981 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239018 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239051 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239084 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239117 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239151 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239185 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239220 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239255 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239289 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239328 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239363 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239405 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239439 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239472 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: 
\"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239510 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239543 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239579 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239614 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239648 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239682 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239713 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239186 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239228 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239488 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.246798 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239570 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239645 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239732 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239742 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239762 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239735 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239798 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239806 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.239888 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.240101 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.240177 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:14:58.7401532 +0000 UTC m=+30.355554853 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.247111 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.247497 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.247547 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.247595 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.247626 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.247655 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.247682 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.247693 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.247809 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.247873 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.247896 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.247919 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.247940 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.247996 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.248028 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.248057 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.248083 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.248108 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.248146 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.248187 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.248205 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.248221 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.240594 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod 
"9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.240653 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.240827 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.241201 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.241200 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.241228 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.241259 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.241484 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.241599 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.241717 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.242135 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.242142 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.242166 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.248363 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.242193 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.242490 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.242498 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.242777 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.242825 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.242923 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.243566 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.243809 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.243888 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.244046 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.244096 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.244102 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.244320 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.244322 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.244364 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.244432 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.244447 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). 
InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.244534 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.244741 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.244775 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.244783 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.245394 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.245524 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.245587 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.245597 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.245771 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.245805 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.245927 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.245942 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.246340 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.246388 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.246887 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.246934 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.248268 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.248440 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.248495 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.248576 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.248688 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.248733 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249019 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). 
InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249154 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249209 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249256 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249289 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249318 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249331 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249344 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249374 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249403 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249431 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249464 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249490 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249514 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249538 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249562 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249590 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") 
" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249616 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249642 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249664 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249687 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249698 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249717 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249743 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249773 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249798 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249849 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249874 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249896 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249919 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249942 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249965 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" 
(OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.249979 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250003 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250062 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250077 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250113 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250146 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250176 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250184 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250206 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250238 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250248 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250269 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250301 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250326 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250355 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250380 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250411 4690 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250439 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250469 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250566 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250582 4690 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250601 4690 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250615 4690 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250624 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250626 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250628 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250631 4690 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250702 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250717 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250747 4690 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250760 4690 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.250778 4690 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250789 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250845 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.250870 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 14:14:58.75084092 +0000 UTC m=+30.366242763 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250890 4690 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250911 4690 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250925 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250928 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.250936 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251093 4690 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251096 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251111 4690 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251164 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251166 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251185 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.251194 4690 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251210 4690 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.251244 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 14:14:58.75123258 +0000 UTC m=+30.366634223 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251341 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251361 4690 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251375 4690 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251377 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251394 4690 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251391 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251438 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251404 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251477 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251480 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251540 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251565 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251719 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251737 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251792 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251459 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251870 4690 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251882 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251894 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251907 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251918 4690 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251929 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251941 4690 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.251980 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252005 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252063 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252071 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252085 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252097 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252109 4690 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252117 4690 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252127 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252137 4690 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252148 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252157 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252167 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" 
DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252178 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252187 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252197 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252206 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252216 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252226 4690 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252236 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252132 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252260 4690 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252289 4690 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252301 4690 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252311 4690 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252312 4690 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252363 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252388 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252404 4690 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252424 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252437 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252449 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252460 4690 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252472 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: 
\"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252484 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252495 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252507 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252517 4690 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252529 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252540 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252552 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252565 4690 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252575 4690 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252586 4690 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252597 4690 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252609 4690 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252610 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.252625 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254204 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254232 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254250 4690 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254408 4690 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254429 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254442 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254456 4690 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254469 4690 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254483 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254498 4690 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254510 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: 
\"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254522 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254534 4690 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254546 4690 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254559 4690 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254571 4690 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254584 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254598 4690 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254612 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254623 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254632 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254641 4690 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254668 4690 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254677 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254688 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254699 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254711 4690 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254722 4690 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254732 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254743 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254752 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254761 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254771 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254782 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254791 4690 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254801 4690 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254810 4690 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254819 4690 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254829 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254838 4690 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254848 4690 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.254856 4690 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.257928 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.262906 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.263530 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.265975 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.266014 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.266029 4690 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.266281 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 14:14:58.76625822 +0000 UTC m=+30.381659863 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.266810 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.267037 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.267253 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.267423 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.267685 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.267829 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.267867 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.267865 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.267882 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.268259 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.268363 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.268483 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.268567 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.268637 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.268670 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.268733 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.268779 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.269175 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.269330 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.270468 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.270661 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.270872 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.270894 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.270909 4690 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.270998 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 14:14:58.77097734 +0000 UTC m=+30.386378983 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.271801 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.271815 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.271869 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.272031 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.272196 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.272215 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.272235 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.272247 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.272255 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.272334 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.272443 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.272567 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.272923 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.272939 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.273236 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.276770 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.278503 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.281275 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.281475 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.281540 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.281638 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.281867 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.282054 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.282258 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.282331 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.282680 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.282687 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.283085 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.283129 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.283257 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.283296 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.283389 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.285215 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355316 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355383 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355435 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355447 4690 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355491 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355500 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355510 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355519 4690 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355529 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355538 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355550 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355563 4690 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" 
Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355575 4690 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355585 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355596 4690 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355607 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355533 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355619 4690 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355697 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355706 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355716 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355728 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355739 4690 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355749 4690 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355757 4690 reconciler_common.go:293] "Volume detached for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355765 4690 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355585 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.355776 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356001 4690 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356072 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356087 4690 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356102 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356117 4690 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356130 4690 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356142 4690 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356154 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356194 4690 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356209 4690 reconciler_common.go:293] "Volume detached for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356221 4690 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356234 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356248 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356262 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356274 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356286 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356298 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356311 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356347 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356361 4690 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356374 4690 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356389 4690 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356403 4690 
reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356415 4690 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356428 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356439 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356453 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356465 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356477 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356518 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356530 4690 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356540 4690 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356554 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356565 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356576 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356587 4690 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356598 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356608 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356619 4690 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356629 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356643 4690 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356697 4690 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356710 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356720 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356732 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356742 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356751 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356760 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356770 4690 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" 
(UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356780 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356790 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.356799 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.607176 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.631187 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.631417 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.638989 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.639579 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.641064 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.641754 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.642702 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.643222 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.643762 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.644717 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.645443 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.646554 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.647158 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.648229 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.648827 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.649540 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.650336 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"287b4f74-6337-4b0a-8b88-d2a7b5dbe870\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86e3b2ab0f496ac74dbb345041994fce60663cc0fb93fda022eca2aa0972dd32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da908cef65c3e26b0269ab075745e82985d674b9b2d3a66dc33c578e1af1f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9f01a91a59925847cbdcfc5b1f3297c8c3750af1ac563f54680f5a46ec4b4a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2649d6d12d5cbbd582dbe2fc993dec24d079fd0
47311b46f8c85d832885bbae4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ffd1e3330e84634ca0bdf799a790e06a37221dfad97303f8d87fc20490c05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cedf619c63b8a5f7c6170d65952df460cb6fa85ea2cc124dc94567960d2565f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cedf619c63b8a5f7c6170d65952df460cb6fa85ea2cc124dc94567960d2565f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T14:14:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bd3ff41d42b9d448d136678f5e34ca9b88b160d3fb7cc13db2e4c66e931d181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bd3ff41d42b9d448d136678f5e34ca9b88b160d3fb7cc13db2e4c66e931d181\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T14:14:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:34Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://93c5772a753ceb587c98116cec5abdeb3f9c58ccdefee541c187fe774ca908c9\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93c5772a753ceb587c98116cec5abdeb3f9c58ccdefee541c187fe774ca908c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T14:14:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T14:14:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.650506 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.651412 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.652276 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.653424 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.654128 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.654686 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.657037 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.660659 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.661292 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.672460 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.681923 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.692689 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.701549 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.712142 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.761773 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.761860 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.761932 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.762042 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:14:59.762014582 +0000 UTC m=+31.377416255 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.762042 4690 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.762076 4690 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.762116 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 14:14:59.762107695 +0000 UTC m=+31.377509438 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.762163 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 14:14:59.762129915 +0000 UTC m=+31.377531668 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.863301 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:14:58 crc kubenswrapper[4690]: I1211 14:14:58.863352 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.863463 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.863489 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.863502 4690 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.863545 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 14:14:59.863532941 +0000 UTC m=+31.478934584 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.863461 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.863585 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.863598 4690 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:14:58 crc kubenswrapper[4690]: E1211 14:14:58.863642 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 14:14:59.863627853 +0000 UTC m=+31.479029496 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:14:59 crc kubenswrapper[4690]: E1211 14:14:59.035565 4690 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"etcd-crc\" already exists" pod="openshift-etcd/etcd-crc" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.164204 4690 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.235020 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.235817 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.245014 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod 
"49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.246272 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.249379 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.250771 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.251420 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.252585 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: W1211 14:14:59.253446 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-7a5f1d111662574279ff1c25fda0022416ea4549a9b48bd6cb49fd1af65a91ec WatchSource:0}: Error finding container 7a5f1d111662574279ff1c25fda0022416ea4549a9b48bd6cb49fd1af65a91ec: Status 404 returned error can't find the container with id 7a5f1d111662574279ff1c25fda0022416ea4549a9b48bd6cb49fd1af65a91ec Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.256134 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.256898 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.257872 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.259501 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.260579 4690 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.260962 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.263808 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.264013 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.264679 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.265794 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.265794 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.266375 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.266481 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.266573 4690 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.266657 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.267582 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.268220 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.269523 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.270898 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.272173 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.272634 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.273318 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.274301 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.275321 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.275774 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.276765 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.277362 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.278484 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.279130 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.280162 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.280885 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.281519 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.282170 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.283112 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.498631 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.512496 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 11 14:14:59 crc kubenswrapper[4690]: W1211 14:14:59.522393 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-ddc53ea70d8579037bedcba3a5495fc9902b3c1e83376a1c5ff7a8478c88889e WatchSource:0}: Error finding container ddc53ea70d8579037bedcba3a5495fc9902b3c1e83376a1c5ff7a8478c88889e: Status 404 returned error can't find the container with id ddc53ea70d8579037bedcba3a5495fc9902b3c1e83376a1c5ff7a8478c88889e Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.630264 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:14:59 crc kubenswrapper[4690]: E1211 14:14:59.630457 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.630288 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:14:59 crc kubenswrapper[4690]: E1211 14:14:59.630625 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 14:14:59 crc kubenswrapper[4690]: W1211 14:14:59.704835 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-85b2b5e829f4f19f3fed94346a9626207474e145924204aed4b7488634181dca WatchSource:0}: Error finding container 85b2b5e829f4f19f3fed94346a9626207474e145924204aed4b7488634181dca: Status 404 returned error can't find the container with id 85b2b5e829f4f19f3fed94346a9626207474e145924204aed4b7488634181dca Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.769883 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:14:59 crc kubenswrapper[4690]: E1211 14:14:59.770092 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:01.770063874 +0000 UTC m=+33.385465527 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.770322 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:14:59 crc kubenswrapper[4690]: E1211 14:14:59.770449 4690 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 14:14:59 crc kubenswrapper[4690]: E1211 14:14:59.770507 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:01.770497785 +0000 UTC m=+33.385899438 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.770459 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:14:59 crc kubenswrapper[4690]: E1211 14:14:59.770751 4690 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 14:14:59 crc kubenswrapper[4690]: E1211 14:14:59.770905 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:01.770884384 +0000 UTC m=+33.386286097 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.871644 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:14:59 crc kubenswrapper[4690]: I1211 14:14:59.871697 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:14:59 crc kubenswrapper[4690]: E1211 14:14:59.871819 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 14:14:59 crc kubenswrapper[4690]: E1211 14:14:59.871839 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 14:14:59 crc kubenswrapper[4690]: E1211 14:14:59.871851 4690 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:14:59 crc kubenswrapper[4690]: E1211 14:14:59.871908 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:01.87189273 +0000 UTC m=+33.487294373 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:14:59 crc kubenswrapper[4690]: E1211 14:14:59.871902 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 14:14:59 crc kubenswrapper[4690]: E1211 14:14:59.871941 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 14:14:59 crc kubenswrapper[4690]: E1211 14:14:59.871971 4690 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:14:59 crc kubenswrapper[4690]: E1211 14:14:59.872042 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:01.872023073 +0000 UTC m=+33.487424776 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.029598 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"7a5f1d111662574279ff1c25fda0022416ea4549a9b48bd6cb49fd1af65a91ec"} Dec 11 14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.031206 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 11 14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.032753 4690 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a9615188d32950a37981ff1d2404296744222ce502221df93893d3cbdafdb9c4" exitCode=255 Dec 11 14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.032815 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"a9615188d32950a37981ff1d2404296744222ce502221df93893d3cbdafdb9c4"} Dec 11 14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.033802 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"85b2b5e829f4f19f3fed94346a9626207474e145924204aed4b7488634181dca"} Dec 11 
14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.034774 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"ddc53ea70d8579037bedcba3a5495fc9902b3c1e83376a1c5ff7a8478c88889e"} Dec 11 14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.043169 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.043275 4690 scope.go:117] "RemoveContainer" containerID="a9615188d32950a37981ff1d2404296744222ce502221df93893d3cbdafdb9c4" Dec 11 14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.044039 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.059114 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"287b4f74-6337-4b0a-8b88-d2a7b5dbe870\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86e3b2ab0f496ac74dbb345041994fce60663cc0fb93fda022eca2aa0972dd32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da908cef65c3e26b0269ab075745e82985d674b9b2d3a66dc33c578e1af1f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-12-11T14:14:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9f01a91a59925847cbdcfc5b1f3297c8c3750af1ac563f54680f5a46ec4b4a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2649d6d12d5cbbd582dbe2fc993dec24d079fd047311b46f8c85d832885bbae4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ffd1e3330e84634ca0bdf799a790e06a37221dfad97303f8d87fc20490c05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cedf619c63b8a5f7c6170d65952df460cb6fa85ea2cc124dc94567960d2565f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cedf619c63b8a5f7c6170d65952df460cb6fa85ea2cc124dc94567960d25
65f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T14:14:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bd3ff41d42b9d448d136678f5e34ca9b88b160d3fb7cc13db2e4c66e931d181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bd3ff41d42b9d448d136678f5e34ca9b88b160d3fb7cc13db2e4c66e931d181\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T14:14:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:34Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://93c5772a753ceb587c98116cec5abdeb3f9c58ccdefee541c187fe774ca908c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93c5772a753ceb587c98116cec5abdeb3f9c58ccdefee541c187fe774ca908c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T14:14:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T14:14:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.073418 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.082478 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.092337 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.100859 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.112894 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 11 14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.630115 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:15:00 crc kubenswrapper[4690]: E1211 14:15:00.630244 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.633667 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 11 14:15:00 crc kubenswrapper[4690]: I1211 14:15:00.634495 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.040383 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.042323 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"9a8a4902e0c6ee75ff794209a28d8f53bd597ce51f863529640262e123086e81"} Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.042656 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.044092 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"b4bf4c587e6a12d8058fc4b7103d4ed04b2fb00644c1b40bee178635f81e81d0"} Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.044256 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"ad5a842734d2773463668b87c3b57291d82ac8527367dc24f768012937525bce"} Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.045535 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"03c835d567c55a811f46bc0524779a9dde8722c31dc27a5d7d20a2922c9b95c7"} Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.058890 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T14:15:01Z is after 2025-08-24T17:21:41Z" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.071044 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T14:15:01Z is after 2025-08-24T17:21:41Z" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.091870 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T14:15:01Z is after 2025-08-24T17:21:41Z" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.111240 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T14:15:01Z is after 2025-08-24T17:21:41Z" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.125002 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T14:15:01Z is after 2025-08-24T17:21:41Z" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.140119 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T14:15:01Z is after 2025-08-24T17:21:41Z" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.162499 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"287b4f74-6337-4b0a-8b88-d2a7b5dbe870\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86e3b2ab0f496ac74dbb345041994fce60663cc0fb93fda022eca2aa0972dd32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da908cef65c3e26b0269ab075745e82985d674b9b2d3a66dc33c578e1af1f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9f01a91a59925847cbdcfc5b1f3297c8c3750af1ac563f54680f5a46ec4b4a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2649d6d12d5cbbd582dbe2fc993dec24d079fd047311b46f8c85d832885bbae4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ffd1e3330e84634ca0bdf799a790e06a37221dfad97303f8d87fc20490c05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cedf619c63b8a5f7c6170d65952df460cb6fa85ea2cc124dc94567960d2565f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://cedf619c63b8a5f7c6170d65952df460cb6fa85ea2cc124dc94567960d2565f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T14:14:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bd3ff41d42b9d448d136678f5e34ca9b88b160d3fb7cc13db2e4c66e931d181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bd3ff41d42b9d448d136678f5e34ca9b88b160d3fb7cc13db2e4c66e931d181\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T14:14:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:34Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://93c5772a753ceb587c98116cec5abdeb3f9c58ccdefee541c187fe774ca908c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93c5772a753ceb587c98116cec5abdeb3f9c58ccdefee541c187fe774ca908c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T14:14:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T14:14:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T14:15:01Z is after 2025-08-24T17:21:41Z" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.182551 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca93a1ee-ec99-4255-9ae2-a987cf127929\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9db92bcfe7099e9a716af1be228bcdb626b2b2383c72901ebbd9ea1c1aceb5a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35a4b3df00b428cbc8e2c97e31b53f1c712b14e5675c5dc1de3008e35d13f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48702dab61bba8195ae012efbdf07816dddea37d296c326397f4fcbdfe766e3e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a8a4902e0c6ee75ff794209a28d8f53bd597ce51f863529640262e123086e81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://a9615188d32950a37981ff1d2404296744222ce502221df93893d3cbdafdb9c4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T14:14:59Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1211 14:14:48.299527 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 14:14:48.301904 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2332406208/tls.crt::/tmp/serving-cert-2332406208/tls.key\\\\\\\"\\\\nI1211 14:14:58.133616 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 14:14:58.136634 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 14:14:58.136657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 14:14:58.136683 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 14:14:58.136690 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 14:14:58.142381 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 14:14:58.142403 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 14:14:58.142413 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 14:14:58.142420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 14:14:58.142425 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 14:14:58.142428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 14:14:58.142431 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 14:14:58.142433 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 14:14:58.144301 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:15:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad35e4e1b19e2eb60eaf01f14cac102b2923d73b6554b5313f9b03341bdbe2b0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d55fe16fad52ea58196b3e2c6f392cf5390b0ab0ec231dba2bedae56faac19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0d55fe16fad52ea58196b3e2c6f392cf5390b0ab0ec231dba2bedae56faac19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T14:14:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T14:14:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T14:15:01Z is after 2025-08-24T17:21:41Z" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.196441 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:15:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:15:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:15:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://03c835d567c55a811f46bc0524779a9dde8722c31dc27a5d7d20a2922c9b95c7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:15:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T14:15:01Z is after 2025-08-24T17:21:41Z" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.209246 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T14:15:01Z is after 2025-08-24T17:21:41Z" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.225155 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T14:15:01Z is after 2025-08-24T17:21:41Z" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.242782 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"287b4f74-6337-4b0a-8b88-d2a7b5dbe870\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86e3b2ab0f496ac74dbb345041994fce60663cc0fb93fda022eca2aa0972dd32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://83da908cef65c3e26b0269ab075745e82985d674b9b2d3a66dc33c578e1af1f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:40Z\\\"}},\\\"volumeM
ounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b9f01a91a59925847cbdcfc5b1f3297c8c3750af1ac563f54680f5a46ec4b4a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2649d6d12d5cbbd582dbe2fc993dec24d079fd047311b46f8c85d832885bbae4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03ffd1e3330e84634ca0bdf799a790e06a37221dfad97303f8d87fc20490c05c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cedf619c63b8a5f7c6170d65952df460cb6fa85ea2cc124dc94567960d2565f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cedf619c63b8a5f7c6170d65952df460cb6fa85ea2cc124dc94567960d2565f2\\\",\\\"exitCode\\\":0,\\\"fi
nishedAt\\\":\\\"2025-12-11T14:14:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2bd3ff41d42b9d448d136678f5e34ca9b88b160d3fb7cc13db2e4c66e931d181\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2bd3ff41d42b9d448d136678f5e34ca9b88b160d3fb7cc13db2e4c66e931d181\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T14:14:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:34Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://93c5772a753ceb587c98116cec5abdeb3f9c58ccdefee541c187fe774ca908c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://93c5772a753ceb587c98116cec5abdeb3f9c58ccdefee541c187fe774ca908c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T14:14:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T14:14:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T14:15:01Z is after 2025-08-24T17:21:41Z" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.255633 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ca93a1ee-ec99-4255-9ae2-a987cf127929\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:30Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9db92bcfe7099e9a716af1be228bcdb626b2b2383c72901ebbd9ea1c1aceb5a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d35a4b3df00b428cbc8e2c97e31b53f1c712b14e5675c5dc1de3008e35d13f74\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://48702dab61bba8195ae012efbdf07816dddea37d296c326397f4fcbdfe766e3e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9a8a4902e0c6ee75ff794209a28d8f53bd597ce51f863529640262e123086e81\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://a9615188d32950a37981ff1d2404296744222ce502221df93893d3cbdafdb9c4\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-11T14:14:59Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1211 14:14:48.299527 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1211 14:14:48.301904 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2332406208/tls.crt::/tmp/serving-cert-2332406208/tls.key\\\\\\\"\\\\nI1211 14:14:58.133616 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1211 14:14:58.136634 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1211 14:14:58.136657 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1211 14:14:58.136683 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1211 14:14:58.136690 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1211 14:14:58.142381 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1211 14:14:58.142403 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1211 14:14:58.142413 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 14:14:58.142420 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1211 14:14:58.142425 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1211 14:14:58.142428 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1211 14:14:58.142431 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1211 14:14:58.142433 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1211 14:14:58.144301 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:37Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:15:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad35e4e1b19e2eb60eaf01f14cac102b2923d73b6554b5313f9b03341bdbe2b0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:14:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a0d55fe16fad52ea58196b3e2c6f392cf5390b0ab0ec231dba2bedae56faac19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a0d55fe16fad52ea58196b3e2c6f392cf5390b0ab0ec231dba2bedae56faac19\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-11T14:14:33Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-11T14:14:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-11T14:14:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T14:15:01Z is after 2025-08-24T17:21:41Z" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.266372 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T14:15:01Z is after 2025-08-24T17:21:41Z" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.277565 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:15:01Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:15:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:15:01Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b4bf4c587e6a12d8058fc4b7103d4ed04b2fb00644c1b40bee178635f81e81d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:15:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a842734d2773463668b87c3b57291d82ac8527367dc24f768012937525bce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-11T14:15:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T14:15:01Z is after 2025-08-24T17:21:41Z" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.289369 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-11T14:14:55Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-11T14:15:01Z is after 2025-08-24T17:21:41Z" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.629865 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.629932 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:01 crc kubenswrapper[4690]: E1211 14:15:01.630035 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 14:15:01 crc kubenswrapper[4690]: E1211 14:15:01.630195 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.787668 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.787743 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:01 crc kubenswrapper[4690]: E1211 14:15:01.787814 4690 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 14:15:01 crc kubenswrapper[4690]: E1211 14:15:01.787819 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:05.78779046 +0000 UTC m=+37.403192103 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:01 crc kubenswrapper[4690]: E1211 14:15:01.787861 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:05.787848651 +0000 UTC m=+37.403250284 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.787886 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:01 crc kubenswrapper[4690]: E1211 14:15:01.787980 4690 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 14:15:01 crc kubenswrapper[4690]: E1211 14:15:01.788011 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:05.788005205 +0000 UTC m=+37.403406848 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.889150 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:15:01 crc kubenswrapper[4690]: I1211 14:15:01.889199 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:15:01 crc kubenswrapper[4690]: E1211 14:15:01.889315 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 14:15:01 crc kubenswrapper[4690]: E1211 14:15:01.889317 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 14:15:01 crc kubenswrapper[4690]: E1211 14:15:01.889333 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 14:15:01 crc kubenswrapper[4690]: E1211 14:15:01.889341 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 14:15:01 crc kubenswrapper[4690]: E1211 14:15:01.889347 4690 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:15:01 crc kubenswrapper[4690]: E1211 14:15:01.889352 4690 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:15:01 crc kubenswrapper[4690]: E1211 14:15:01.889398 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:05.88938423 +0000 UTC m=+37.504785873 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:15:01 crc kubenswrapper[4690]: E1211 14:15:01.889412 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:05.88940639 +0000 UTC m=+37.504808033 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:15:02 crc kubenswrapper[4690]: I1211 14:15:02.630430 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:15:02 crc kubenswrapper[4690]: E1211 14:15:02.630593 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 14:15:02 crc kubenswrapper[4690]: I1211 14:15:02.896432 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 14:15:02 crc kubenswrapper[4690]: I1211 14:15:02.899626 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 14:15:02 crc kubenswrapper[4690]: I1211 14:15:02.904126 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 11 14:15:02 crc kubenswrapper[4690]: I1211 14:15:02.928984 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=4.928943008 podStartE2EDuration="4.928943008s" podCreationTimestamp="2025-12-11 14:14:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:02.927944313 +0000 UTC m=+34.543345956" watchObservedRunningTime="2025-12-11 14:15:02.928943008 +0000 UTC m=+34.544344651" Dec 11 14:15:02 crc kubenswrapper[4690]: I1211 14:15:02.947505 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=2.947490157 podStartE2EDuration="2.947490157s" podCreationTimestamp="2025-12-11 14:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:02.947255281 +0000 UTC m=+34.562656934" watchObservedRunningTime="2025-12-11 14:15:02.947490157 +0000 UTC m=+34.562891790" Dec 11 14:15:03 crc kubenswrapper[4690]: I1211 14:15:03.029366 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=1.029340728 podStartE2EDuration="1.029340728s" podCreationTimestamp="2025-12-11 14:15:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:03.029322108 +0000 UTC m=+34.644723761" watchObservedRunningTime="2025-12-11 14:15:03.029340728 +0000 UTC m=+34.644742391" Dec 11 14:15:03 crc kubenswrapper[4690]: I1211 14:15:03.051878 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"5928d998896a4e5ead915276a2816bf6440e6fa76c3c0a68d079e41f64171f9c"} Dec 11 14:15:03 crc kubenswrapper[4690]: I1211 14:15:03.630351 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:03 crc kubenswrapper[4690]: I1211 14:15:03.630366 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:15:03 crc kubenswrapper[4690]: E1211 14:15:03.630824 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 14:15:03 crc kubenswrapper[4690]: E1211 14:15:03.630914 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.099210 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-qkg6f"] Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.099586 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-qkg6f" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.102822 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.103221 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.103401 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.179479 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-8nnxw"] Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.179881 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-8nnxw" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.181985 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.182306 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.182829 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.184806 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.208641 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/90bf1d67-4002-4bf2-b885-112e2b676f00-hosts-file\") pod \"node-resolver-qkg6f\" (UID: \"90bf1d67-4002-4bf2-b885-112e2b676f00\") " pod="openshift-dns/node-resolver-qkg6f" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.208707 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxsb4\" (UniqueName: \"kubernetes.io/projected/90bf1d67-4002-4bf2-b885-112e2b676f00-kube-api-access-jxsb4\") pod \"node-resolver-qkg6f\" (UID: \"90bf1d67-4002-4bf2-b885-112e2b676f00\") " pod="openshift-dns/node-resolver-qkg6f" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.297246 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-z9662"] Dec 11 14:15:04 crc kubenswrapper[4690]: 
I1211 14:15:04.304511 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-z9662" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.307867 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-gzbqz"] Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.308432 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-nbps6"] Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.308525 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.308614 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.308762 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.308919 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.309337 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.309537 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.309691 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.309719 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxsb4\" (UniqueName: \"kubernetes.io/projected/90bf1d67-4002-4bf2-b885-112e2b676f00-kube-api-access-jxsb4\") pod \"node-resolver-qkg6f\" (UID: \"90bf1d67-4002-4bf2-b885-112e2b676f00\") " pod="openshift-dns/node-resolver-qkg6f" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.309759 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kvn4z\" (UniqueName: \"kubernetes.io/projected/449308b0-3cfd-4d3c-890d-552ede5466e6-kube-api-access-kvn4z\") pod \"node-ca-8nnxw\" (UID: \"449308b0-3cfd-4d3c-890d-552ede5466e6\") " pod="openshift-image-registry/node-ca-8nnxw" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.309779 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/449308b0-3cfd-4d3c-890d-552ede5466e6-host\") pod \"node-ca-8nnxw\" (UID: \"449308b0-3cfd-4d3c-890d-552ede5466e6\") " pod="openshift-image-registry/node-ca-8nnxw" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.309804 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/90bf1d67-4002-4bf2-b885-112e2b676f00-hosts-file\") pod \"node-resolver-qkg6f\" (UID: \"90bf1d67-4002-4bf2-b885-112e2b676f00\") " pod="openshift-dns/node-resolver-qkg6f" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.309818 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: 
\"kubernetes.io/configmap/449308b0-3cfd-4d3c-890d-552ede5466e6-serviceca\") pod \"node-ca-8nnxw\" (UID: \"449308b0-3cfd-4d3c-890d-552ede5466e6\") " pod="openshift-image-registry/node-ca-8nnxw" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.309913 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/90bf1d67-4002-4bf2-b885-112e2b676f00-hosts-file\") pod \"node-resolver-qkg6f\" (UID: \"90bf1d67-4002-4bf2-b885-112e2b676f00\") " pod="openshift-dns/node-resolver-qkg6f" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.316517 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.316685 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.316823 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.316849 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.316643 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.317274 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.317439 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.333858 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-r8sd9"] Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.334404 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:04 crc kubenswrapper[4690]: E1211 14:15:04.334477 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r8sd9" podUID="0cbb05eb-6650-45bc-ae3f-d29df5940583" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.337731 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxsb4\" (UniqueName: \"kubernetes.io/projected/90bf1d67-4002-4bf2-b885-112e2b676f00-kube-api-access-jxsb4\") pod \"node-resolver-qkg6f\" (UID: \"90bf1d67-4002-4bf2-b885-112e2b676f00\") " pod="openshift-dns/node-resolver-qkg6f" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.410348 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-run-k8s-cni-cncf-io\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.410627 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6860b472-f9ad-4fea-971b-44df4aa52606-cnibin\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.410754 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6860b472-f9ad-4fea-971b-44df4aa52606-tuning-conf-dir\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.410886 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-var-lib-kubelet\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411060 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-os-release\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411107 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-etc-kubernetes\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411126 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-var-lib-cni-bin\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411141 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-multus-conf-dir\") 
pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411166 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/6860b472-f9ad-4fea-971b-44df4aa52606-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411201 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kvn4z\" (UniqueName: \"kubernetes.io/projected/449308b0-3cfd-4d3c-890d-552ede5466e6-kube-api-access-kvn4z\") pod \"node-ca-8nnxw\" (UID: \"449308b0-3cfd-4d3c-890d-552ede5466e6\") " pod="openshift-image-registry/node-ca-8nnxw" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411224 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/44a7b31b-09dd-452b-87ba-29764eaa0206-proxy-tls\") pod \"machine-config-daemon-z9662\" (UID: \"44a7b31b-09dd-452b-87ba-29764eaa0206\") " pod="openshift-machine-config-operator/machine-config-daemon-z9662" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411248 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6860b472-f9ad-4fea-971b-44df4aa52606-os-release\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411277 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/449308b0-3cfd-4d3c-890d-552ede5466e6-serviceca\") pod \"node-ca-8nnxw\" (UID: \"449308b0-3cfd-4d3c-890d-552ede5466e6\") " pod="openshift-image-registry/node-ca-8nnxw" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411302 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-multus-socket-dir-parent\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411323 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-run-netns\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411345 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/44a7b31b-09dd-452b-87ba-29764eaa0206-rootfs\") pod \"machine-config-daemon-z9662\" (UID: \"44a7b31b-09dd-452b-87ba-29764eaa0206\") " pod="openshift-machine-config-operator/machine-config-daemon-z9662" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411363 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: 
\"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-system-cni-dir\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411380 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-cnibin\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411398 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-run-multus-certs\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411416 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zd4j6\" (UniqueName: \"kubernetes.io/projected/44a7b31b-09dd-452b-87ba-29764eaa0206-kube-api-access-zd4j6\") pod \"machine-config-daemon-z9662\" (UID: \"44a7b31b-09dd-452b-87ba-29764eaa0206\") " pod="openshift-machine-config-operator/machine-config-daemon-z9662" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411447 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-multus-cni-dir\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411464 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/845eca40-4a90-4dfb-b177-610dd3860602-cni-binary-copy\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411479 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-var-lib-cni-multus\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411502 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs\") pod \"network-metrics-daemon-r8sd9\" (UID: \"0cbb05eb-6650-45bc-ae3f-d29df5940583\") " pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411529 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/845eca40-4a90-4dfb-b177-610dd3860602-multus-daemon-config\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411545 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" 
(UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-hostroot\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411560 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44vkq\" (UniqueName: \"kubernetes.io/projected/845eca40-4a90-4dfb-b177-610dd3860602-kube-api-access-44vkq\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411576 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6860b472-f9ad-4fea-971b-44df4aa52606-cni-binary-copy\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411594 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/449308b0-3cfd-4d3c-890d-552ede5466e6-host\") pod \"node-ca-8nnxw\" (UID: \"449308b0-3cfd-4d3c-890d-552ede5466e6\") " pod="openshift-image-registry/node-ca-8nnxw" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411624 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7dsd\" (UniqueName: \"kubernetes.io/projected/6860b472-f9ad-4fea-971b-44df4aa52606-kube-api-access-h7dsd\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411649 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnj8r\" (UniqueName: \"kubernetes.io/projected/0cbb05eb-6650-45bc-ae3f-d29df5940583-kube-api-access-fnj8r\") pod \"network-metrics-daemon-r8sd9\" (UID: \"0cbb05eb-6650-45bc-ae3f-d29df5940583\") " pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411670 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/44a7b31b-09dd-452b-87ba-29764eaa0206-mcd-auth-proxy-config\") pod \"machine-config-daemon-z9662\" (UID: \"44a7b31b-09dd-452b-87ba-29764eaa0206\") " pod="openshift-machine-config-operator/machine-config-daemon-z9662" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.411710 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6860b472-f9ad-4fea-971b-44df4aa52606-system-cni-dir\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.412317 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/449308b0-3cfd-4d3c-890d-552ede5466e6-host\") pod \"node-ca-8nnxw\" (UID: \"449308b0-3cfd-4d3c-890d-552ede5466e6\") " pod="openshift-image-registry/node-ca-8nnxw" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.413801 4690 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/449308b0-3cfd-4d3c-890d-552ede5466e6-serviceca\") pod \"node-ca-8nnxw\" (UID: \"449308b0-3cfd-4d3c-890d-552ede5466e6\") " pod="openshift-image-registry/node-ca-8nnxw" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.415520 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-qkg6f" Dec 11 14:15:04 crc kubenswrapper[4690]: W1211 14:15:04.428364 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod90bf1d67_4002_4bf2_b885_112e2b676f00.slice/crio-601cdc605b86f983d8e0b92a41767a0a2a1467418feb513ac456b95664a2946c WatchSource:0}: Error finding container 601cdc605b86f983d8e0b92a41767a0a2a1467418feb513ac456b95664a2946c: Status 404 returned error can't find the container with id 601cdc605b86f983d8e0b92a41767a0a2a1467418feb513ac456b95664a2946c Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.440003 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kvn4z\" (UniqueName: \"kubernetes.io/projected/449308b0-3cfd-4d3c-890d-552ede5466e6-kube-api-access-kvn4z\") pod \"node-ca-8nnxw\" (UID: \"449308b0-3cfd-4d3c-890d-552ede5466e6\") " pod="openshift-image-registry/node-ca-8nnxw" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.493422 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-8nnxw" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.512968 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-var-lib-kubelet\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513024 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-etc-kubernetes\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513046 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-os-release\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513065 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-multus-conf-dir\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513085 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/6860b472-f9ad-4fea-971b-44df4aa52606-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513109 4690 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-var-lib-cni-bin\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513139 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/44a7b31b-09dd-452b-87ba-29764eaa0206-proxy-tls\") pod \"machine-config-daemon-z9662\" (UID: \"44a7b31b-09dd-452b-87ba-29764eaa0206\") " pod="openshift-machine-config-operator/machine-config-daemon-z9662" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513158 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6860b472-f9ad-4fea-971b-44df4aa52606-os-release\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513174 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-multus-socket-dir-parent\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513189 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-run-netns\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513180 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-var-lib-kubelet\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513254 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/44a7b31b-09dd-452b-87ba-29764eaa0206-rootfs\") pod \"machine-config-daemon-z9662\" (UID: \"44a7b31b-09dd-452b-87ba-29764eaa0206\") " pod="openshift-machine-config-operator/machine-config-daemon-z9662" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513282 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-var-lib-cni-bin\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513205 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/44a7b31b-09dd-452b-87ba-29764eaa0206-rootfs\") pod \"machine-config-daemon-z9662\" (UID: \"44a7b31b-09dd-452b-87ba-29764eaa0206\") " pod="openshift-machine-config-operator/machine-config-daemon-z9662" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513373 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: 
\"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-run-multus-certs\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513399 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zd4j6\" (UniqueName: \"kubernetes.io/projected/44a7b31b-09dd-452b-87ba-29764eaa0206-kube-api-access-zd4j6\") pod \"machine-config-daemon-z9662\" (UID: \"44a7b31b-09dd-452b-87ba-29764eaa0206\") " pod="openshift-machine-config-operator/machine-config-daemon-z9662" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513424 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-system-cni-dir\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513443 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-cnibin\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513476 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-multus-cni-dir\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513494 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/845eca40-4a90-4dfb-b177-610dd3860602-cni-binary-copy\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513511 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-var-lib-cni-multus\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513535 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs\") pod \"network-metrics-daemon-r8sd9\" (UID: \"0cbb05eb-6650-45bc-ae3f-d29df5940583\") " pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513572 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/845eca40-4a90-4dfb-b177-610dd3860602-multus-daemon-config\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513617 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44vkq\" (UniqueName: \"kubernetes.io/projected/845eca40-4a90-4dfb-b177-610dd3860602-kube-api-access-44vkq\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " 
pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513645 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6860b472-f9ad-4fea-971b-44df4aa52606-cni-binary-copy\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513676 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-hostroot\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513702 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7dsd\" (UniqueName: \"kubernetes.io/projected/6860b472-f9ad-4fea-971b-44df4aa52606-kube-api-access-h7dsd\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513725 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6860b472-f9ad-4fea-971b-44df4aa52606-system-cni-dir\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513748 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnj8r\" (UniqueName: \"kubernetes.io/projected/0cbb05eb-6650-45bc-ae3f-d29df5940583-kube-api-access-fnj8r\") pod \"network-metrics-daemon-r8sd9\" (UID: \"0cbb05eb-6650-45bc-ae3f-d29df5940583\") " pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513774 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/44a7b31b-09dd-452b-87ba-29764eaa0206-mcd-auth-proxy-config\") pod \"machine-config-daemon-z9662\" (UID: \"44a7b31b-09dd-452b-87ba-29764eaa0206\") " pod="openshift-machine-config-operator/machine-config-daemon-z9662" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513807 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6860b472-f9ad-4fea-971b-44df4aa52606-tuning-conf-dir\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513841 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-run-k8s-cni-cncf-io\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513866 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6860b472-f9ad-4fea-971b-44df4aa52606-cnibin\") pod \"multus-additional-cni-plugins-gzbqz\" 
(UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.513946 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/6860b472-f9ad-4fea-971b-44df4aa52606-cnibin\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.514017 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-multus-cni-dir\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.514099 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-etc-kubernetes\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.514284 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-os-release\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.514368 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-run-multus-certs\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.514461 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-multus-conf-dir\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.514666 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-system-cni-dir\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.514705 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-cnibin\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.514898 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-hostroot\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.514902 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/845eca40-4a90-4dfb-b177-610dd3860602-cni-binary-copy\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.514928 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-var-lib-cni-multus\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: E1211 14:15:04.515011 4690 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 14:15:04 crc kubenswrapper[4690]: E1211 14:15:04.515057 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs podName:0cbb05eb-6650-45bc-ae3f-d29df5940583 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:05.015040185 +0000 UTC m=+36.630441828 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs") pod "network-metrics-daemon-r8sd9" (UID: "0cbb05eb-6650-45bc-ae3f-d29df5940583") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.515246 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/6860b472-f9ad-4fea-971b-44df4aa52606-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.515367 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/6860b472-f9ad-4fea-971b-44df4aa52606-system-cni-dir\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.515721 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/845eca40-4a90-4dfb-b177-610dd3860602-multus-daemon-config\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.515815 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/6860b472-f9ad-4fea-971b-44df4aa52606-cni-binary-copy\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.517362 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/6860b472-f9ad-4fea-971b-44df4aa52606-os-release\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.517463 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-run-netns\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.517505 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-host-run-k8s-cni-cncf-io\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.517688 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/44a7b31b-09dd-452b-87ba-29764eaa0206-proxy-tls\") pod \"machine-config-daemon-z9662\" (UID: \"44a7b31b-09dd-452b-87ba-29764eaa0206\") " pod="openshift-machine-config-operator/machine-config-daemon-z9662" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.517969 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/44a7b31b-09dd-452b-87ba-29764eaa0206-mcd-auth-proxy-config\") pod \"machine-config-daemon-z9662\" (UID: \"44a7b31b-09dd-452b-87ba-29764eaa0206\") " pod="openshift-machine-config-operator/machine-config-daemon-z9662" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.518053 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/6860b472-f9ad-4fea-971b-44df4aa52606-tuning-conf-dir\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.518492 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/845eca40-4a90-4dfb-b177-610dd3860602-multus-socket-dir-parent\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.544218 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnj8r\" (UniqueName: \"kubernetes.io/projected/0cbb05eb-6650-45bc-ae3f-d29df5940583-kube-api-access-fnj8r\") pod \"network-metrics-daemon-r8sd9\" (UID: \"0cbb05eb-6650-45bc-ae3f-d29df5940583\") " pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.549595 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44vkq\" (UniqueName: \"kubernetes.io/projected/845eca40-4a90-4dfb-b177-610dd3860602-kube-api-access-44vkq\") pod \"multus-nbps6\" (UID: \"845eca40-4a90-4dfb-b177-610dd3860602\") " pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.552117 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zd4j6\" (UniqueName: \"kubernetes.io/projected/44a7b31b-09dd-452b-87ba-29764eaa0206-kube-api-access-zd4j6\") pod \"machine-config-daemon-z9662\" (UID: \"44a7b31b-09dd-452b-87ba-29764eaa0206\") " pod="openshift-machine-config-operator/machine-config-daemon-z9662" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.569275 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-hrsvk"] Dec 11 
14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.570191 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.572097 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.572244 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.573279 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.573484 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.573715 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.574170 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.574308 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.590020 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7dsd\" (UniqueName: \"kubernetes.io/projected/6860b472-f9ad-4fea-971b-44df4aa52606-kube-api-access-h7dsd\") pod \"multus-additional-cni-plugins-gzbqz\" (UID: \"6860b472-f9ad-4fea-971b-44df4aa52606\") " pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.626248 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-z9662" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.631942 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:15:04 crc kubenswrapper[4690]: E1211 14:15:04.632067 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.632554 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-nbps6" Dec 11 14:15:04 crc kubenswrapper[4690]: W1211 14:15:04.640889 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod44a7b31b_09dd_452b_87ba_29764eaa0206.slice/crio-3e259a1843ecca56463921fbaa380b4c6ec8a66d4f7744350b35d78e2f034bed WatchSource:0}: Error finding container 3e259a1843ecca56463921fbaa380b4c6ec8a66d4f7744350b35d78e2f034bed: Status 404 returned error can't find the container with id 3e259a1843ecca56463921fbaa380b4c6ec8a66d4f7744350b35d78e2f034bed Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.645709 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-gzbqz" Dec 11 14:15:04 crc kubenswrapper[4690]: W1211 14:15:04.653553 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod845eca40_4a90_4dfb_b177_610dd3860602.slice/crio-ef8af6e941d130238aac39c18ce2658d14d834c45ccece81c9ee665fde23eef2 WatchSource:0}: Error finding container ef8af6e941d130238aac39c18ce2658d14d834c45ccece81c9ee665fde23eef2: Status 404 returned error can't find the container with id ef8af6e941d130238aac39c18ce2658d14d834c45ccece81c9ee665fde23eef2 Dec 11 14:15:04 crc kubenswrapper[4690]: W1211 14:15:04.658681 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6860b472_f9ad_4fea_971b_44df4aa52606.slice/crio-43818fe7b4be2d21b206fc24694a18a7e5e2d6e5c0f29dc1eab06013c124fc14 WatchSource:0}: Error finding container 43818fe7b4be2d21b206fc24694a18a7e5e2d6e5c0f29dc1eab06013c124fc14: Status 404 returned error can't find the container with id 43818fe7b4be2d21b206fc24694a18a7e5e2d6e5c0f29dc1eab06013c124fc14 Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.715752 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-run-ovn-kubernetes\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.715790 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.715809 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-systemd-units\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.715826 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-env-overrides\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" 
Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.715850 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-ovn-node-metrics-cert\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.715865 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-cni-bin\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.715886 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-slash\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.715900 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-run-ovn\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.715914 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-log-socket\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.716005 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4mm7\" (UniqueName: \"kubernetes.io/projected/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-kube-api-access-j4mm7\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.716036 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-kubelet\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.716054 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-run-systemd\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.716075 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-run-netns\") pod \"ovnkube-node-hrsvk\" (UID: 
\"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.716091 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-run-openvswitch\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.716106 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-cni-netd\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.716121 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-ovnkube-config\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.716136 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-ovnkube-script-lib\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.716157 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-var-lib-openvswitch\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.716171 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-etc-openvswitch\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.716184 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-node-log\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817182 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-node-log\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817222 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-var-lib-openvswitch\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817239 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-etc-openvswitch\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817258 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817276 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-run-ovn-kubernetes\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817298 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-systemd-units\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817312 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-env-overrides\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817329 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-ovn-node-metrics-cert\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817342 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-cni-bin\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817365 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-run-ovn\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817382 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-slash\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817405 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-log-socket\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817427 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4mm7\" (UniqueName: \"kubernetes.io/projected/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-kube-api-access-j4mm7\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817453 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-kubelet\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817468 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-run-systemd\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817484 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-cni-netd\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817498 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-ovnkube-config\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817519 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-run-netns\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817532 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-run-openvswitch\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.817550 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-ovnkube-script-lib\") pod 
\"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.818147 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-run-ovn\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.818176 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-run-systemd\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.818204 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-run-ovn-kubernetes\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.818253 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-slash\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.818232 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-systemd-units\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.818296 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-run-netns\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.818317 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-run-openvswitch\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.818327 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-cni-netd\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.818259 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-ovnkube-script-lib\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 
14:15:04.818283 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-kubelet\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.818349 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-etc-openvswitch\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.818380 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-node-log\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.818385 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-var-lib-openvswitch\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.818386 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.818219 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-log-socket\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.818425 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-host-cni-bin\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.818879 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-ovnkube-config\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.819345 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-env-overrides\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.821581 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-ovn-node-metrics-cert\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.841609 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4mm7\" (UniqueName: \"kubernetes.io/projected/4aebec7d-4e1a-46ea-a3a7-9263d91406ea-kube-api-access-j4mm7\") pod \"ovnkube-node-hrsvk\" (UID: \"4aebec7d-4e1a-46ea-a3a7-9263d91406ea\") " pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.908095 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.963063 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk"] Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.963445 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.965074 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 11 14:15:04 crc kubenswrapper[4690]: I1211 14:15:04.965098 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.020275 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs\") pod \"network-metrics-daemon-r8sd9\" (UID: \"0cbb05eb-6650-45bc-ae3f-d29df5940583\") " pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.020425 4690 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.020475 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs podName:0cbb05eb-6650-45bc-ae3f-d29df5940583 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:06.020460161 +0000 UTC m=+37.635861804 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs") pod "network-metrics-daemon-r8sd9" (UID: "0cbb05eb-6650-45bc-ae3f-d29df5940583") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.058209 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-qkg6f" event={"ID":"90bf1d67-4002-4bf2-b885-112e2b676f00","Type":"ContainerStarted","Data":"601cdc605b86f983d8e0b92a41767a0a2a1467418feb513ac456b95664a2946c"} Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.059022 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-gzbqz" event={"ID":"6860b472-f9ad-4fea-971b-44df4aa52606","Type":"ContainerStarted","Data":"43818fe7b4be2d21b206fc24694a18a7e5e2d6e5c0f29dc1eab06013c124fc14"} Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.059904 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-nbps6" event={"ID":"845eca40-4a90-4dfb-b177-610dd3860602","Type":"ContainerStarted","Data":"ef8af6e941d130238aac39c18ce2658d14d834c45ccece81c9ee665fde23eef2"} Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.060680 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" event={"ID":"4aebec7d-4e1a-46ea-a3a7-9263d91406ea","Type":"ContainerStarted","Data":"8dfb86ed46ad7078a63aa2f43dc6beca91bc6cb176aebc2d37015a0b1c971647"} Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.061355 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z9662" event={"ID":"44a7b31b-09dd-452b-87ba-29764eaa0206","Type":"ContainerStarted","Data":"3e259a1843ecca56463921fbaa380b4c6ec8a66d4f7744350b35d78e2f034bed"} Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.062514 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-8nnxw" event={"ID":"449308b0-3cfd-4d3c-890d-552ede5466e6","Type":"ContainerStarted","Data":"ebe399cb1a179a9a5c160c11baadade2c7bcbdb76d32ebc76ec6f43529a81b44"} Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.120754 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/705f17df-3730-4ce6-8049-05beb030d347-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-2cbhk\" (UID: \"705f17df-3730-4ce6-8049-05beb030d347\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.120807 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4f5q9\" (UniqueName: \"kubernetes.io/projected/705f17df-3730-4ce6-8049-05beb030d347-kube-api-access-4f5q9\") pod \"ovnkube-control-plane-749d76644c-2cbhk\" (UID: \"705f17df-3730-4ce6-8049-05beb030d347\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.120996 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/705f17df-3730-4ce6-8049-05beb030d347-env-overrides\") pod \"ovnkube-control-plane-749d76644c-2cbhk\" (UID: \"705f17df-3730-4ce6-8049-05beb030d347\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" 
Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.121052 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/705f17df-3730-4ce6-8049-05beb030d347-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-2cbhk\" (UID: \"705f17df-3730-4ce6-8049-05beb030d347\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.133665 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.135827 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.135868 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.135879 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.136014 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.165782 4690 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.166124 4690 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.167051 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.167093 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.167107 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.167123 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.167135 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-11T14:15:05Z","lastTransitionTime":"2025-12-11T14:15:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.222295 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/705f17df-3730-4ce6-8049-05beb030d347-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-2cbhk\" (UID: \"705f17df-3730-4ce6-8049-05beb030d347\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.222630 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/705f17df-3730-4ce6-8049-05beb030d347-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-2cbhk\" (UID: \"705f17df-3730-4ce6-8049-05beb030d347\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.222658 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4f5q9\" (UniqueName: \"kubernetes.io/projected/705f17df-3730-4ce6-8049-05beb030d347-kube-api-access-4f5q9\") pod \"ovnkube-control-plane-749d76644c-2cbhk\" (UID: \"705f17df-3730-4ce6-8049-05beb030d347\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.222699 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/705f17df-3730-4ce6-8049-05beb030d347-env-overrides\") pod \"ovnkube-control-plane-749d76644c-2cbhk\" (UID: \"705f17df-3730-4ce6-8049-05beb030d347\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.223222 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/705f17df-3730-4ce6-8049-05beb030d347-env-overrides\") pod \"ovnkube-control-plane-749d76644c-2cbhk\" (UID: \"705f17df-3730-4ce6-8049-05beb030d347\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.223767 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/705f17df-3730-4ce6-8049-05beb030d347-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-2cbhk\" (UID: \"705f17df-3730-4ce6-8049-05beb030d347\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.227515 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/705f17df-3730-4ce6-8049-05beb030d347-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-2cbhk\" (UID: \"705f17df-3730-4ce6-8049-05beb030d347\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.269515 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4f5q9\" (UniqueName: \"kubernetes.io/projected/705f17df-3730-4ce6-8049-05beb030d347-kube-api-access-4f5q9\") pod \"ovnkube-control-plane-749d76644c-2cbhk\" (UID: \"705f17df-3730-4ce6-8049-05beb030d347\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.287546 4690 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" Dec 11 14:15:05 crc kubenswrapper[4690]: W1211 14:15:05.302755 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod705f17df_3730_4ce6_8049_05beb030d347.slice/crio-d9dbf7daece60cc32ccec9b947a419b636c12781e57134a1004829c00e4836e9 WatchSource:0}: Error finding container d9dbf7daece60cc32ccec9b947a419b636c12781e57134a1004829c00e4836e9: Status 404 returned error can't find the container with id d9dbf7daece60cc32ccec9b947a419b636c12781e57134a1004829c00e4836e9 Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.327625 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg"] Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.328211 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.331358 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.331473 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.331478 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.333330 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.424593 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/71f0da52-20d3-43e5-81be-6cb2cfd58743-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-h2sbg\" (UID: \"71f0da52-20d3-43e5-81be-6cb2cfd58743\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.424640 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/71f0da52-20d3-43e5-81be-6cb2cfd58743-service-ca\") pod \"cluster-version-operator-5c965bbfc6-h2sbg\" (UID: \"71f0da52-20d3-43e5-81be-6cb2cfd58743\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.424687 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/71f0da52-20d3-43e5-81be-6cb2cfd58743-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-h2sbg\" (UID: \"71f0da52-20d3-43e5-81be-6cb2cfd58743\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.424714 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/71f0da52-20d3-43e5-81be-6cb2cfd58743-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-h2sbg\" (UID: 
\"71f0da52-20d3-43e5-81be-6cb2cfd58743\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.424927 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71f0da52-20d3-43e5-81be-6cb2cfd58743-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-h2sbg\" (UID: \"71f0da52-20d3-43e5-81be-6cb2cfd58743\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.526408 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71f0da52-20d3-43e5-81be-6cb2cfd58743-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-h2sbg\" (UID: \"71f0da52-20d3-43e5-81be-6cb2cfd58743\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.526532 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/71f0da52-20d3-43e5-81be-6cb2cfd58743-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-h2sbg\" (UID: \"71f0da52-20d3-43e5-81be-6cb2cfd58743\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.526564 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/71f0da52-20d3-43e5-81be-6cb2cfd58743-service-ca\") pod \"cluster-version-operator-5c965bbfc6-h2sbg\" (UID: \"71f0da52-20d3-43e5-81be-6cb2cfd58743\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.526597 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/71f0da52-20d3-43e5-81be-6cb2cfd58743-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-h2sbg\" (UID: \"71f0da52-20d3-43e5-81be-6cb2cfd58743\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.526638 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/71f0da52-20d3-43e5-81be-6cb2cfd58743-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-h2sbg\" (UID: \"71f0da52-20d3-43e5-81be-6cb2cfd58743\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.526689 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/71f0da52-20d3-43e5-81be-6cb2cfd58743-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-h2sbg\" (UID: \"71f0da52-20d3-43e5-81be-6cb2cfd58743\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.526780 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/71f0da52-20d3-43e5-81be-6cb2cfd58743-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-h2sbg\" (UID: \"71f0da52-20d3-43e5-81be-6cb2cfd58743\") " 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.527536 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/71f0da52-20d3-43e5-81be-6cb2cfd58743-service-ca\") pod \"cluster-version-operator-5c965bbfc6-h2sbg\" (UID: \"71f0da52-20d3-43e5-81be-6cb2cfd58743\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.530312 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71f0da52-20d3-43e5-81be-6cb2cfd58743-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-h2sbg\" (UID: \"71f0da52-20d3-43e5-81be-6cb2cfd58743\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.541609 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/71f0da52-20d3-43e5-81be-6cb2cfd58743-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-h2sbg\" (UID: \"71f0da52-20d3-43e5-81be-6cb2cfd58743\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.630764 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.630876 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.630970 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.631003 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.631096 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r8sd9" podUID="0cbb05eb-6650-45bc-ae3f-d29df5940583" Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.631187 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.672827 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" Dec 11 14:15:05 crc kubenswrapper[4690]: W1211 14:15:05.683759 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71f0da52_20d3_43e5_81be_6cb2cfd58743.slice/crio-fa39c7e19431e0d641519b33d97d9740f109ae92ef7725096b8517dbeeeef4d9 WatchSource:0}: Error finding container fa39c7e19431e0d641519b33d97d9740f109ae92ef7725096b8517dbeeeef4d9: Status 404 returned error can't find the container with id fa39c7e19431e0d641519b33d97d9740f109ae92ef7725096b8517dbeeeef4d9 Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.828244 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.828363 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.828474 4690 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.828477 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:13.828436822 +0000 UTC m=+45.443838475 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.828557 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:13.828540454 +0000 UTC m=+45.443942107 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.828668 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.828840 4690 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.828912 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:13.828900454 +0000 UTC m=+45.444302147 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.929708 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:15:05 crc kubenswrapper[4690]: I1211 14:15:05.929778 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.929908 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.929924 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.929935 4690 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.929933 4690 
projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.929995 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.930015 4690 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.929996 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:13.929982681 +0000 UTC m=+45.545384324 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:15:05 crc kubenswrapper[4690]: E1211 14:15:05.930097 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:13.930077903 +0000 UTC m=+45.545479606 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.030894 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs\") pod \"network-metrics-daemon-r8sd9\" (UID: \"0cbb05eb-6650-45bc-ae3f-d29df5940583\") " pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:06 crc kubenswrapper[4690]: E1211 14:15:06.031122 4690 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 14:15:06 crc kubenswrapper[4690]: E1211 14:15:06.031496 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs podName:0cbb05eb-6650-45bc-ae3f-d29df5940583 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:08.031476658 +0000 UTC m=+39.646878301 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs") pod "network-metrics-daemon-r8sd9" (UID: "0cbb05eb-6650-45bc-ae3f-d29df5940583") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.066692 4690 generic.go:334] "Generic (PLEG): container finished" podID="4aebec7d-4e1a-46ea-a3a7-9263d91406ea" containerID="cd394e1b379e38eead708fd34e162674125b61d1c161d7dbc8b2ee2f1964a6e4" exitCode=0 Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.066764 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" event={"ID":"4aebec7d-4e1a-46ea-a3a7-9263d91406ea","Type":"ContainerDied","Data":"cd394e1b379e38eead708fd34e162674125b61d1c161d7dbc8b2ee2f1964a6e4"} Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.070078 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z9662" event={"ID":"44a7b31b-09dd-452b-87ba-29764eaa0206","Type":"ContainerStarted","Data":"f926d5fd95ff8197d23790758b699718b9126dc257a86231a1b9064c2de99d34"} Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.070138 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z9662" event={"ID":"44a7b31b-09dd-452b-87ba-29764eaa0206","Type":"ContainerStarted","Data":"0e4832b2b6dce8997cd247f1916ab6a197fa3f64006449b295e150efb8bd4eb7"} Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.079143 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-8nnxw" event={"ID":"449308b0-3cfd-4d3c-890d-552ede5466e6","Type":"ContainerStarted","Data":"c4edc0d49a05566c3ecde310db37164f9244aaf9c0726845c7b5d73a52f1f758"} Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.089998 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-qkg6f" event={"ID":"90bf1d67-4002-4bf2-b885-112e2b676f00","Type":"ContainerStarted","Data":"5334b70e1aea3100261463965ea06f5c02ddbdef4fee691d3177d15ef6841862"} Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.092900 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" event={"ID":"705f17df-3730-4ce6-8049-05beb030d347","Type":"ContainerStarted","Data":"adf062a008ddab3d2a23abcf77ea64943cd9cadfc5136002b0a9ad1475ea10ea"} Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.092942 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" event={"ID":"705f17df-3730-4ce6-8049-05beb030d347","Type":"ContainerStarted","Data":"6c24755284b8e13b90826ef74cdec988bbca0ef8d2c966cdb167a85874af171b"} Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.092975 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" event={"ID":"705f17df-3730-4ce6-8049-05beb030d347","Type":"ContainerStarted","Data":"d9dbf7daece60cc32ccec9b947a419b636c12781e57134a1004829c00e4836e9"} Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.094705 4690 generic.go:334] "Generic (PLEG): container finished" podID="6860b472-f9ad-4fea-971b-44df4aa52606" containerID="b5cf75517ae872ed454032c1d6a6e84cae3829e34b073ca6f8881129b29cd023" exitCode=0 Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.094762 4690 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-multus/multus-additional-cni-plugins-gzbqz" event={"ID":"6860b472-f9ad-4fea-971b-44df4aa52606","Type":"ContainerDied","Data":"b5cf75517ae872ed454032c1d6a6e84cae3829e34b073ca6f8881129b29cd023"} Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.098809 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" event={"ID":"71f0da52-20d3-43e5-81be-6cb2cfd58743","Type":"ContainerStarted","Data":"0403a93e950b8c3299c62987f238c27de621a35ffd60dfe278159b00c87804ee"} Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.098848 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" event={"ID":"71f0da52-20d3-43e5-81be-6cb2cfd58743","Type":"ContainerStarted","Data":"fa39c7e19431e0d641519b33d97d9740f109ae92ef7725096b8517dbeeeef4d9"} Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.101700 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-nbps6" event={"ID":"845eca40-4a90-4dfb-b177-610dd3860602","Type":"ContainerStarted","Data":"2a6220d5b21de3edbbc714b2330572e012d8b868eae9951ecf9a11d43a158cca"} Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.155619 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-nbps6" podStartSLOduration=2.155597318 podStartE2EDuration="2.155597318s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:06.128746128 +0000 UTC m=+37.744147791" watchObservedRunningTime="2025-12-11 14:15:06.155597318 +0000 UTC m=+37.770998961" Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.175819 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-2cbhk" podStartSLOduration=2.175796319 podStartE2EDuration="2.175796319s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:06.175392638 +0000 UTC m=+37.790794281" watchObservedRunningTime="2025-12-11 14:15:06.175796319 +0000 UTC m=+37.791197962" Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.191328 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-h2sbg" podStartSLOduration=2.191302231 podStartE2EDuration="2.191302231s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:06.189938686 +0000 UTC m=+37.805340339" watchObservedRunningTime="2025-12-11 14:15:06.191302231 +0000 UTC m=+37.806703874" Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.213286 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-8nnxw" podStartSLOduration=2.213265757 podStartE2EDuration="2.213265757s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:06.212588379 +0000 UTC m=+37.827990032" watchObservedRunningTime="2025-12-11 14:15:06.213265757 +0000 UTC m=+37.828667400" Dec 11 14:15:06 crc 
kubenswrapper[4690]: I1211 14:15:06.230621 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-qkg6f" podStartSLOduration=2.230606395 podStartE2EDuration="2.230606395s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:06.230296857 +0000 UTC m=+37.845698490" watchObservedRunningTime="2025-12-11 14:15:06.230606395 +0000 UTC m=+37.846008038" Dec 11 14:15:06 crc kubenswrapper[4690]: I1211 14:15:06.630409 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:15:06 crc kubenswrapper[4690]: E1211 14:15:06.630531 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 14:15:07 crc kubenswrapper[4690]: I1211 14:15:07.106904 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" event={"ID":"4aebec7d-4e1a-46ea-a3a7-9263d91406ea","Type":"ContainerStarted","Data":"f0d35334ffbcda818c44fdaa0d481acf6b16b7f733acaf55aebebad7d8e51d70"} Dec 11 14:15:07 crc kubenswrapper[4690]: I1211 14:15:07.107204 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" event={"ID":"4aebec7d-4e1a-46ea-a3a7-9263d91406ea","Type":"ContainerStarted","Data":"b3822580243b357e5b8b743e4b6ad5684ed3ce73b9f1db500fcc4f2e83615d55"} Dec 11 14:15:07 crc kubenswrapper[4690]: I1211 14:15:07.107216 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" event={"ID":"4aebec7d-4e1a-46ea-a3a7-9263d91406ea","Type":"ContainerStarted","Data":"174a6eb5309854ad48f40c2c384b281dc4e640dc9fc21330629608934de956db"} Dec 11 14:15:07 crc kubenswrapper[4690]: I1211 14:15:07.109572 4690 generic.go:334] "Generic (PLEG): container finished" podID="6860b472-f9ad-4fea-971b-44df4aa52606" containerID="1fa027412a8f22e70bd264f702261c9d57a06dcaca6fc8c1576fa6623a746cb3" exitCode=0 Dec 11 14:15:07 crc kubenswrapper[4690]: I1211 14:15:07.109662 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-gzbqz" event={"ID":"6860b472-f9ad-4fea-971b-44df4aa52606","Type":"ContainerDied","Data":"1fa027412a8f22e70bd264f702261c9d57a06dcaca6fc8c1576fa6623a746cb3"} Dec 11 14:15:07 crc kubenswrapper[4690]: I1211 14:15:07.129522 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-z9662" podStartSLOduration=3.129502326 podStartE2EDuration="3.129502326s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:06.246679942 +0000 UTC m=+37.862081585" watchObservedRunningTime="2025-12-11 14:15:07.129502326 +0000 UTC m=+38.744903969" Dec 11 14:15:07 crc kubenswrapper[4690]: I1211 14:15:07.629976 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:07 crc kubenswrapper[4690]: I1211 14:15:07.630036 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:07 crc kubenswrapper[4690]: I1211 14:15:07.630068 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:15:07 crc kubenswrapper[4690]: E1211 14:15:07.630113 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r8sd9" podUID="0cbb05eb-6650-45bc-ae3f-d29df5940583" Dec 11 14:15:07 crc kubenswrapper[4690]: E1211 14:15:07.630176 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 14:15:07 crc kubenswrapper[4690]: E1211 14:15:07.630342 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 14:15:08 crc kubenswrapper[4690]: I1211 14:15:08.054231 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs\") pod \"network-metrics-daemon-r8sd9\" (UID: \"0cbb05eb-6650-45bc-ae3f-d29df5940583\") " pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:08 crc kubenswrapper[4690]: E1211 14:15:08.054464 4690 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 14:15:08 crc kubenswrapper[4690]: E1211 14:15:08.054719 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs podName:0cbb05eb-6650-45bc-ae3f-d29df5940583 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:12.054697302 +0000 UTC m=+43.670098935 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs") pod "network-metrics-daemon-r8sd9" (UID: "0cbb05eb-6650-45bc-ae3f-d29df5940583") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 14:15:08 crc kubenswrapper[4690]: I1211 14:15:08.116711 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" event={"ID":"4aebec7d-4e1a-46ea-a3a7-9263d91406ea","Type":"ContainerStarted","Data":"d62eba5d743f47b5ac6be8ec6e242f14decf98ff5011433a395d7f75843f121d"} Dec 11 14:15:08 crc kubenswrapper[4690]: I1211 14:15:08.116752 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" event={"ID":"4aebec7d-4e1a-46ea-a3a7-9263d91406ea","Type":"ContainerStarted","Data":"fe80fe387afd0452c62379aa34dcb45817f1c6319f062e2e1bc643603bee52fe"} Dec 11 14:15:08 crc kubenswrapper[4690]: I1211 14:15:08.116762 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" event={"ID":"4aebec7d-4e1a-46ea-a3a7-9263d91406ea","Type":"ContainerStarted","Data":"88faa8d658f559d67d56080958401dc0c8411bf826f3e3f856724a7f1842a092"} Dec 11 14:15:08 crc kubenswrapper[4690]: I1211 14:15:08.118701 4690 generic.go:334] "Generic (PLEG): container finished" podID="6860b472-f9ad-4fea-971b-44df4aa52606" containerID="e37fc127bd10e403818bc743dcd2f183ae3e4c5f1da9281ec24a6d1485c8b980" exitCode=0 Dec 11 14:15:08 crc kubenswrapper[4690]: I1211 14:15:08.118729 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-gzbqz" event={"ID":"6860b472-f9ad-4fea-971b-44df4aa52606","Type":"ContainerDied","Data":"e37fc127bd10e403818bc743dcd2f183ae3e4c5f1da9281ec24a6d1485c8b980"} Dec 11 14:15:08 crc kubenswrapper[4690]: I1211 14:15:08.630626 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:15:08 crc kubenswrapper[4690]: E1211 14:15:08.636810 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 14:15:09 crc kubenswrapper[4690]: I1211 14:15:09.124780 4690 generic.go:334] "Generic (PLEG): container finished" podID="6860b472-f9ad-4fea-971b-44df4aa52606" containerID="14368527411e4b15461f774e0735d347c6f9cd3d65a94a982fbd870acf248343" exitCode=0 Dec 11 14:15:09 crc kubenswrapper[4690]: I1211 14:15:09.124824 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-gzbqz" event={"ID":"6860b472-f9ad-4fea-971b-44df4aa52606","Type":"ContainerDied","Data":"14368527411e4b15461f774e0735d347c6f9cd3d65a94a982fbd870acf248343"} Dec 11 14:15:09 crc kubenswrapper[4690]: I1211 14:15:09.629878 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:15:09 crc kubenswrapper[4690]: I1211 14:15:09.629879 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:09 crc kubenswrapper[4690]: E1211 14:15:09.630036 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 14:15:09 crc kubenswrapper[4690]: I1211 14:15:09.629879 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:09 crc kubenswrapper[4690]: E1211 14:15:09.630095 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 14:15:09 crc kubenswrapper[4690]: E1211 14:15:09.630167 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r8sd9" podUID="0cbb05eb-6650-45bc-ae3f-d29df5940583" Dec 11 14:15:10 crc kubenswrapper[4690]: I1211 14:15:10.130019 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-gzbqz" event={"ID":"6860b472-f9ad-4fea-971b-44df4aa52606","Type":"ContainerStarted","Data":"73ea37041023107fc9f95f2660830db5b1f7806e7726a0ab4c081b7fd828730b"} Dec 11 14:15:10 crc kubenswrapper[4690]: I1211 14:15:10.630903 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:15:10 crc kubenswrapper[4690]: E1211 14:15:10.631088 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 14:15:11 crc kubenswrapper[4690]: I1211 14:15:11.136051 4690 generic.go:334] "Generic (PLEG): container finished" podID="6860b472-f9ad-4fea-971b-44df4aa52606" containerID="73ea37041023107fc9f95f2660830db5b1f7806e7726a0ab4c081b7fd828730b" exitCode=0 Dec 11 14:15:11 crc kubenswrapper[4690]: I1211 14:15:11.136109 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-gzbqz" event={"ID":"6860b472-f9ad-4fea-971b-44df4aa52606","Type":"ContainerDied","Data":"73ea37041023107fc9f95f2660830db5b1f7806e7726a0ab4c081b7fd828730b"} Dec 11 14:15:11 crc kubenswrapper[4690]: I1211 14:15:11.140838 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" event={"ID":"4aebec7d-4e1a-46ea-a3a7-9263d91406ea","Type":"ContainerStarted","Data":"3c780dec3632ff6c226fc0d17d08bf54dd11c28913d247cee2c81451c9cdb5b8"} Dec 11 14:15:11 crc kubenswrapper[4690]: I1211 14:15:11.630165 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:11 crc kubenswrapper[4690]: I1211 14:15:11.630227 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:11 crc kubenswrapper[4690]: E1211 14:15:11.630276 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 14:15:11 crc kubenswrapper[4690]: I1211 14:15:11.630178 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:15:11 crc kubenswrapper[4690]: E1211 14:15:11.630352 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r8sd9" podUID="0cbb05eb-6650-45bc-ae3f-d29df5940583" Dec 11 14:15:11 crc kubenswrapper[4690]: E1211 14:15:11.630419 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 14:15:12 crc kubenswrapper[4690]: I1211 14:15:12.098420 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs\") pod \"network-metrics-daemon-r8sd9\" (UID: \"0cbb05eb-6650-45bc-ae3f-d29df5940583\") " pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:12 crc kubenswrapper[4690]: E1211 14:15:12.098565 4690 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 14:15:12 crc kubenswrapper[4690]: E1211 14:15:12.098650 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs podName:0cbb05eb-6650-45bc-ae3f-d29df5940583 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:20.098633308 +0000 UTC m=+51.714034951 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs") pod "network-metrics-daemon-r8sd9" (UID: "0cbb05eb-6650-45bc-ae3f-d29df5940583") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 11 14:15:12 crc kubenswrapper[4690]: I1211 14:15:12.147310 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-gzbqz" event={"ID":"6860b472-f9ad-4fea-971b-44df4aa52606","Type":"ContainerStarted","Data":"b1b164e04061a670484bd8a3152331ece1f5d8a2b55fa0f158eaf51c20ca20f2"} Dec 11 14:15:12 crc kubenswrapper[4690]: I1211 14:15:12.460781 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:15:12 crc kubenswrapper[4690]: I1211 14:15:12.630007 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:15:12 crc kubenswrapper[4690]: E1211 14:15:12.630130 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 14:15:13 crc kubenswrapper[4690]: I1211 14:15:13.157310 4690 generic.go:334] "Generic (PLEG): container finished" podID="6860b472-f9ad-4fea-971b-44df4aa52606" containerID="b1b164e04061a670484bd8a3152331ece1f5d8a2b55fa0f158eaf51c20ca20f2" exitCode=0 Dec 11 14:15:13 crc kubenswrapper[4690]: I1211 14:15:13.157373 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-gzbqz" event={"ID":"6860b472-f9ad-4fea-971b-44df4aa52606","Type":"ContainerDied","Data":"b1b164e04061a670484bd8a3152331ece1f5d8a2b55fa0f158eaf51c20ca20f2"} Dec 11 14:15:13 crc kubenswrapper[4690]: I1211 14:15:13.630740 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:13 crc kubenswrapper[4690]: I1211 14:15:13.630782 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:13 crc kubenswrapper[4690]: E1211 14:15:13.631266 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 14:15:13 crc kubenswrapper[4690]: E1211 14:15:13.631350 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r8sd9" podUID="0cbb05eb-6650-45bc-ae3f-d29df5940583" Dec 11 14:15:13 crc kubenswrapper[4690]: I1211 14:15:13.630810 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:15:13 crc kubenswrapper[4690]: E1211 14:15:13.631551 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 14:15:13 crc kubenswrapper[4690]: I1211 14:15:13.917296 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:13 crc kubenswrapper[4690]: I1211 14:15:13.917440 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:13 crc kubenswrapper[4690]: I1211 14:15:13.917476 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:13 crc kubenswrapper[4690]: E1211 14:15:13.917550 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:29.917512512 +0000 UTC m=+61.532914175 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:13 crc kubenswrapper[4690]: E1211 14:15:13.917572 4690 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 14:15:13 crc kubenswrapper[4690]: E1211 14:15:13.917639 4690 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 14:15:13 crc kubenswrapper[4690]: E1211 14:15:13.917656 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:29.917647005 +0000 UTC m=+61.533048648 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 11 14:15:13 crc kubenswrapper[4690]: E1211 14:15:13.917738 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:29.917715077 +0000 UTC m=+61.533116780 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 11 14:15:14 crc kubenswrapper[4690]: I1211 14:15:14.018906 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:15:14 crc kubenswrapper[4690]: I1211 14:15:14.019016 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:15:14 crc kubenswrapper[4690]: E1211 14:15:14.019152 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 14:15:14 crc kubenswrapper[4690]: E1211 14:15:14.019173 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 14:15:14 crc kubenswrapper[4690]: E1211 14:15:14.019186 4690 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:15:14 crc kubenswrapper[4690]: E1211 14:15:14.019204 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 11 14:15:14 crc kubenswrapper[4690]: E1211 14:15:14.019244 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:30.019228025 +0000 UTC m=+61.634629668 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:15:14 crc kubenswrapper[4690]: E1211 14:15:14.019246 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 11 14:15:14 crc kubenswrapper[4690]: E1211 14:15:14.019265 4690 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:15:14 crc kubenswrapper[4690]: E1211 14:15:14.019328 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:30.019310527 +0000 UTC m=+61.634712160 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 11 14:15:14 crc kubenswrapper[4690]: I1211 14:15:14.163514 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" event={"ID":"4aebec7d-4e1a-46ea-a3a7-9263d91406ea","Type":"ContainerStarted","Data":"83e34678d44ca6eb6b516df55f08b70a31303319565d0186a425fd68f9726625"} Dec 11 14:15:14 crc kubenswrapper[4690]: I1211 14:15:14.163844 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:14 crc kubenswrapper[4690]: I1211 14:15:14.163874 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:14 crc kubenswrapper[4690]: I1211 14:15:14.172897 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-gzbqz" event={"ID":"6860b472-f9ad-4fea-971b-44df4aa52606","Type":"ContainerStarted","Data":"528669bae82e55d91b09dd860660e3c2fdea8a05a227803844323c84a6bcc934"} Dec 11 14:15:14 crc kubenswrapper[4690]: I1211 14:15:14.189531 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:14 crc kubenswrapper[4690]: I1211 14:15:14.213066 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-gzbqz" podStartSLOduration=10.213049508 podStartE2EDuration="10.213049508s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:14.212475314 +0000 UTC m=+45.827876957" 
watchObservedRunningTime="2025-12-11 14:15:14.213049508 +0000 UTC m=+45.828451151" Dec 11 14:15:14 crc kubenswrapper[4690]: I1211 14:15:14.213377 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" podStartSLOduration=10.213371657 podStartE2EDuration="10.213371657s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:14.195849633 +0000 UTC m=+45.811251296" watchObservedRunningTime="2025-12-11 14:15:14.213371657 +0000 UTC m=+45.828773310" Dec 11 14:15:14 crc kubenswrapper[4690]: I1211 14:15:14.630350 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:15:14 crc kubenswrapper[4690]: E1211 14:15:14.630461 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 14:15:14 crc kubenswrapper[4690]: I1211 14:15:14.909545 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:14 crc kubenswrapper[4690]: I1211 14:15:14.938661 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:15 crc kubenswrapper[4690]: I1211 14:15:15.630814 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:15:15 crc kubenswrapper[4690]: E1211 14:15:15.631086 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 14:15:15 crc kubenswrapper[4690]: I1211 14:15:15.631156 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:15 crc kubenswrapper[4690]: I1211 14:15:15.631254 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:15 crc kubenswrapper[4690]: E1211 14:15:15.631369 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 14:15:15 crc kubenswrapper[4690]: E1211 14:15:15.631557 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r8sd9" podUID="0cbb05eb-6650-45bc-ae3f-d29df5940583" Dec 11 14:15:16 crc kubenswrapper[4690]: I1211 14:15:16.013312 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-r8sd9"] Dec 11 14:15:16 crc kubenswrapper[4690]: I1211 14:15:16.178463 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:16 crc kubenswrapper[4690]: E1211 14:15:16.178580 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-r8sd9" podUID="0cbb05eb-6650-45bc-ae3f-d29df5940583" Dec 11 14:15:16 crc kubenswrapper[4690]: I1211 14:15:16.630807 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:15:16 crc kubenswrapper[4690]: E1211 14:15:16.630974 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 14:15:17 crc kubenswrapper[4690]: I1211 14:15:17.630234 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:15:17 crc kubenswrapper[4690]: I1211 14:15:17.630292 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:17 crc kubenswrapper[4690]: I1211 14:15:17.630341 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:17 crc kubenswrapper[4690]: E1211 14:15:17.630392 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 11 14:15:17 crc kubenswrapper[4690]: E1211 14:15:17.630469 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-r8sd9" podUID="0cbb05eb-6650-45bc-ae3f-d29df5940583" Dec 11 14:15:17 crc kubenswrapper[4690]: E1211 14:15:17.630710 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 11 14:15:18 crc kubenswrapper[4690]: I1211 14:15:18.630979 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:15:18 crc kubenswrapper[4690]: E1211 14:15:18.631878 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.283767 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.284260 4690 kubelet_node_status.go:538] "Fast updating node status as it just became ready" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.314870 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.315205 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.317410 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.317733 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.319076 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.319170 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.319271 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.320078 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.320340 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.320478 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.320725 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.321003 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.321079 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.321002 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-crxzj"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.321579 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.321940 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.322188 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.322346 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.322446 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.322615 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.322623 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.323330 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bfp74"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.323659 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bfp74" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.324911 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-czjn5"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.325223 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.325508 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.325691 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.326069 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.326217 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.326632 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.327384 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.329046 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-rb4j9"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.329561 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.334090 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.334600 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4gxkn"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.334976 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8l25g"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.335028 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.335212 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.335481 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.335549 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8l25g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.339589 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.339813 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.339830 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4gxkn" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.340158 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.340408 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.343130 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.343377 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.343555 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.343758 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.343775 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.343922 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.344005 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.344363 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.345171 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-8klv5"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.345783 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-8klv5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.356452 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.356782 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.357044 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.357211 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.357344 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.357502 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.357658 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.357990 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-v47hs"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.358443 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.358923 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-r56mp"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.359576 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.360162 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.366428 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.366630 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.366936 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.367088 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.367159 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.367231 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.367306 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.367322 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.367345 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.367460 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.367501 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.367560 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.367577 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.367705 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.367714 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.367737 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.367800 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 
14:15:19.368641 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9wx9g"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.369412 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.374709 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.376170 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.376365 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.376794 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.376847 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.378347 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.378830 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-q2v54"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.378934 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.379114 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.379230 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.379257 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-77vtz"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.379329 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.379485 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.379547 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.379611 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.379643 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-q2v54" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.379722 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.379814 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.379932 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.380159 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.380184 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.380345 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.380351 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.380438 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.379616 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.380654 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.392387 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.393528 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.393807 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.393887 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.394040 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9h2v4"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.394333 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.394399 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5n4lj"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.394637 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.394648 4690 reflector.go:368] Caches populated 
for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.394668 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5n4lj" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.394774 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.394860 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9h2v4" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.394967 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.396014 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.400614 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.403156 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.403498 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.403868 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.404372 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.407709 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.423246 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.423501 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.430513 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-c5zwr"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.432746 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nmql7"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.433461 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-jzm89"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.435253 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-z8tth"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.435763 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8l25g"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.435921 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-z8tth" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.436208 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.430627 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.436505 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.436739 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nmql7" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.437024 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.433294 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.433430 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.440366 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.442297 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.442806 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.444127 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.444830 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.448037 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-czjn5"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.449198 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4lfzc"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.449737 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4lfzc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.450351 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.453665 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zl7lm"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.454278 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zl7lm" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.454591 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.455179 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.456628 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.457208 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.457658 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.458193 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bfp74"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.459163 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6mnd7"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.460152 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mnd7" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.461053 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-zsqv7"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.461571 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-zsqv7" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.462094 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-sgjk8"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.462664 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-sgjk8" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.463773 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r2jbn"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.508812 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.510837 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.511828 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-zrnhb\" (UID: \"24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.511881 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9211752e-9c0b-43ea-9c4d-5d91fcb472db-service-ca-bundle\") pod \"router-default-5444994796-jzm89\" (UID: \"9211752e-9c0b-43ea-9c4d-5d91fcb472db\") " pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.511925 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ecd7785-d164-4998-a361-031cd179f164-serving-cert\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.511945 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f1e1cca-4214-4752-8bc4-1972df6de928-config\") pod \"machine-api-operator-5694c8668f-crxzj\" (UID: \"7f1e1cca-4214-4752-8bc4-1972df6de928\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.511980 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-etcd-client\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.511996 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr54t\" (UniqueName: \"kubernetes.io/projected/24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab-kube-api-access-pr54t\") pod \"cluster-image-registry-operator-dc59b4c8b-zrnhb\" (UID: \"24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512157 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9211752e-9c0b-43ea-9c4d-5d91fcb472db-metrics-certs\") pod 
\"router-default-5444994796-jzm89\" (UID: \"9211752e-9c0b-43ea-9c4d-5d91fcb472db\") " pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512238 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-trusted-ca-bundle\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512286 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512320 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mn7c\" (UniqueName: \"kubernetes.io/projected/e8f5b2a0-33a0-46b8-9f75-6d732599e8e1-kube-api-access-7mn7c\") pod \"cluster-samples-operator-665b6dd947-4gxkn\" (UID: \"e8f5b2a0-33a0-46b8-9f75-6d732599e8e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4gxkn" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512353 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fbdc74c8-8b96-479b-b06c-637acb1bb68a-console-config\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512383 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/57d5b2c5-8c4a-410b-9acf-94852865a8d6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-9h2v4\" (UID: \"57d5b2c5-8c4a-410b-9acf-94852865a8d6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9h2v4" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512409 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff1e4a4f-2591-46a6-b478-7cef472306c3-serving-cert\") pod \"openshift-config-operator-7777fb866f-8l25g\" (UID: \"ff1e4a4f-2591-46a6-b478-7cef472306c3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8l25g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512445 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512568 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/3455abcd-3a0d-4376-908c-81484c62002a-machine-approver-tls\") pod 
\"machine-approver-56656f9798-h2qcw\" (UID: \"3455abcd-3a0d-4376-908c-81484c62002a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512614 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9dbcf64e-599d-458f-a5cd-92f58deaa813-trusted-ca\") pod \"console-operator-58897d9998-q2v54\" (UID: \"9dbcf64e-599d-458f-a5cd-92f58deaa813\") " pod="openshift-console-operator/console-operator-58897d9998-q2v54" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512642 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512666 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nn6kq\" (UniqueName: \"kubernetes.io/projected/f0fc8c25-f7b2-4f7b-8ea7-323e91557a24-kube-api-access-nn6kq\") pod \"migrator-59844c95c7-6mnd7\" (UID: \"f0fc8c25-f7b2-4f7b-8ea7-323e91557a24\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mnd7" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512699 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d5b2c5-8c4a-410b-9acf-94852865a8d6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-9h2v4\" (UID: \"57d5b2c5-8c4a-410b-9acf-94852865a8d6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9h2v4" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512756 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-audit-dir\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512789 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac076d6f-30a7-4230-8320-936d709bf640-service-ca-bundle\") pod \"authentication-operator-69f744f599-czjn5\" (UID: \"ac076d6f-30a7-4230-8320-936d709bf640\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512810 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512851 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/08650951-b99f-42e0-8321-9b1b9560f1cb-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-4lfzc\" (UID: \"08650951-b99f-42e0-8321-9b1b9560f1cb\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4lfzc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512881 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512914 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-encryption-config\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.512973 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzkr7\" (UniqueName: \"kubernetes.io/projected/4ecd7785-d164-4998-a361-031cd179f164-kube-api-access-kzkr7\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513001 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-serving-cert\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513034 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3414e867-9935-42f6-9de9-853252ee06d3-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-bfp74\" (UID: \"3414e867-9935-42f6-9de9-853252ee06d3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bfp74" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513053 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/9211752e-9c0b-43ea-9c4d-5d91fcb472db-default-certificate\") pod \"router-default-5444994796-jzm89\" (UID: \"9211752e-9c0b-43ea-9c4d-5d91fcb472db\") " pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513078 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3c1f84bf-fc2e-465a-b3e1-436eb29e50ab-proxy-tls\") pod \"machine-config-controller-84d6567774-8gzc6\" (UID: \"3c1f84bf-fc2e-465a-b3e1-436eb29e50ab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513098 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513116 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-client-ca\") pod \"controller-manager-879f6c89f-v47hs\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513134 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513166 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jk66\" (UniqueName: \"kubernetes.io/projected/9dbcf64e-599d-458f-a5cd-92f58deaa813-kube-api-access-6jk66\") pod \"console-operator-58897d9998-q2v54\" (UID: \"9dbcf64e-599d-458f-a5cd-92f58deaa813\") " pod="openshift-console-operator/console-operator-58897d9998-q2v54" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513194 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513220 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fbdc74c8-8b96-479b-b06c-637acb1bb68a-oauth-serving-cert\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513247 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6568e808-b0c8-4d3a-b454-51b9c810f2a3-srv-cert\") pod \"catalog-operator-68c6474976-j2v2w\" (UID: \"6568e808-b0c8-4d3a-b454-51b9c810f2a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513275 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac076d6f-30a7-4230-8320-936d709bf640-serving-cert\") pod \"authentication-operator-69f744f599-czjn5\" (UID: \"ac076d6f-30a7-4230-8320-936d709bf640\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513305 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-zrnhb\" (UID: \"24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513346 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fbdc74c8-8b96-479b-b06c-637acb1bb68a-console-oauth-config\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513374 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nq6fd\" (UniqueName: \"kubernetes.io/projected/6568e808-b0c8-4d3a-b454-51b9c810f2a3-kube-api-access-nq6fd\") pod \"catalog-operator-68c6474976-j2v2w\" (UID: \"6568e808-b0c8-4d3a-b454-51b9c810f2a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513403 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-serving-cert\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513424 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bnslm\" (UniqueName: \"kubernetes.io/projected/3414e867-9935-42f6-9de9-853252ee06d3-kube-api-access-bnslm\") pod \"openshift-apiserver-operator-796bbdcf4f-bfp74\" (UID: \"3414e867-9935-42f6-9de9-853252ee06d3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bfp74" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513460 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-audit-policies\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513529 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513570 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fbdc74c8-8b96-479b-b06c-637acb1bb68a-service-ca\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513602 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7c2d\" (UniqueName: 
\"kubernetes.io/projected/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-kube-api-access-f7c2d\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513626 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcx4v\" (UniqueName: \"kubernetes.io/projected/57d5b2c5-8c4a-410b-9acf-94852865a8d6-kube-api-access-vcx4v\") pod \"openshift-controller-manager-operator-756b6f6bc6-9h2v4\" (UID: \"57d5b2c5-8c4a-410b-9acf-94852865a8d6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9h2v4" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513664 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e221469-9c10-4c08-925b-a4e4e4c3d208-config\") pod \"route-controller-manager-6576b87f9c-tlndc\" (UID: \"2e221469-9c10-4c08-925b-a4e4e4c3d208\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513691 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.513913 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4ecd7785-d164-4998-a361-031cd179f164-etcd-client\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.514082 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.514121 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/4ecd7785-d164-4998-a361-031cd179f164-etcd-ca\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.514202 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwdjn\" (UniqueName: \"kubernetes.io/projected/08650951-b99f-42e0-8321-9b1b9560f1cb-kube-api-access-rwdjn\") pod \"kube-storage-version-migrator-operator-b67b599dd-4lfzc\" (UID: \"08650951-b99f-42e0-8321-9b1b9560f1cb\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4lfzc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.514548 4690 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-service-ca/service-ca-9c57cc56f-6wqbb"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.514556 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.514556 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.514581 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mt6mg\" (UniqueName: \"kubernetes.io/projected/7f1e1cca-4214-4752-8bc4-1972df6de928-kube-api-access-mt6mg\") pod \"machine-api-operator-5694c8668f-crxzj\" (UID: \"7f1e1cca-4214-4752-8bc4-1972df6de928\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.514720 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.514821 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ecd7785-d164-4998-a361-031cd179f164-config\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.515260 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5c5595c-6c47-401a-8067-a02411ef722b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zl7lm\" (UID: \"f5c5595c-6c47-401a-8067-a02411ef722b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zl7lm" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.515834 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5c5595c-6c47-401a-8067-a02411ef722b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zl7lm\" (UID: \"f5c5595c-6c47-401a-8067-a02411ef722b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zl7lm" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.515902 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-etcd-serving-ca\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.515933 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-audit-dir\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.515982 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e221469-9c10-4c08-925b-a4e4e4c3d208-serving-cert\") pod 
\"route-controller-manager-6576b87f9c-tlndc\" (UID: \"2e221469-9c10-4c08-925b-a4e4e4c3d208\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.516020 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dc5x\" (UniqueName: \"kubernetes.io/projected/9211752e-9c0b-43ea-9c4d-5d91fcb472db-kube-api-access-8dc5x\") pod \"router-default-5444994796-jzm89\" (UID: \"9211752e-9c0b-43ea-9c4d-5d91fcb472db\") " pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.516199 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f5c5595c-6c47-401a-8067-a02411ef722b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zl7lm\" (UID: \"f5c5595c-6c47-401a-8067-a02411ef722b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zl7lm" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.516247 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.516398 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/ff1e4a4f-2591-46a6-b478-7cef472306c3-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8l25g\" (UID: \"ff1e4a4f-2591-46a6-b478-7cef472306c3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8l25g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.516468 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6568e808-b0c8-4d3a-b454-51b9c810f2a3-profile-collector-cert\") pod \"catalog-operator-68c6474976-j2v2w\" (UID: \"6568e808-b0c8-4d3a-b454-51b9c810f2a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.521478 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3414e867-9935-42f6-9de9-853252ee06d3-config\") pod \"openshift-apiserver-operator-796bbdcf4f-bfp74\" (UID: \"3414e867-9935-42f6-9de9-853252ee06d3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bfp74" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.521540 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ssp4w\" (UniqueName: \"kubernetes.io/projected/4ac34e73-25e1-449d-9c43-e6bc5054ede8-kube-api-access-ssp4w\") pod \"downloads-7954f5f757-8klv5\" (UID: \"4ac34e73-25e1-449d-9c43-e6bc5054ede8\") " pod="openshift-console/downloads-7954f5f757-8klv5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.521561 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.521585 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/4ecd7785-d164-4998-a361-031cd179f164-etcd-service-ca\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.521602 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fbdc74c8-8b96-479b-b06c-637acb1bb68a-console-serving-cert\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.521626 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sd57\" (UniqueName: \"kubernetes.io/projected/3455abcd-3a0d-4376-908c-81484c62002a-kube-api-access-2sd57\") pod \"machine-approver-56656f9798-h2qcw\" (UID: \"3455abcd-3a0d-4376-908c-81484c62002a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.521645 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-config\") pod \"controller-manager-879f6c89f-v47hs\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.521669 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-config\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.521686 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fbdc74c8-8b96-479b-b06c-637acb1bb68a-trusted-ca-bundle\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.521703 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-image-import-ca\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.521720 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrr9r\" (UniqueName: \"kubernetes.io/projected/ac076d6f-30a7-4230-8320-936d709bf640-kube-api-access-wrr9r\") pod 
\"authentication-operator-69f744f599-czjn5\" (UID: \"ac076d6f-30a7-4230-8320-936d709bf640\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.521740 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfghh\" (UniqueName: \"kubernetes.io/projected/3c1f84bf-fc2e-465a-b3e1-436eb29e50ab-kube-api-access-hfghh\") pod \"machine-config-controller-84d6567774-8gzc6\" (UID: \"3c1f84bf-fc2e-465a-b3e1-436eb29e50ab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.521767 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-zrnhb\" (UID: \"24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.521815 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2e221469-9c10-4c08-925b-a4e4e4c3d208-client-ca\") pod \"route-controller-manager-6576b87f9c-tlndc\" (UID: \"2e221469-9c10-4c08-925b-a4e4e4c3d208\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.521836 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-audit\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.521839 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.522441 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.522531 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.522677 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-6wqbb" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.521853 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac076d6f-30a7-4230-8320-936d709bf640-config\") pod \"authentication-operator-69f744f599-czjn5\" (UID: \"ac076d6f-30a7-4230-8320-936d709bf640\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.522941 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-v47hs\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.522997 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/7f1e1cca-4214-4752-8bc4-1972df6de928-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-crxzj\" (UID: \"7f1e1cca-4214-4752-8bc4-1972df6de928\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523041 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9dbcf64e-599d-458f-a5cd-92f58deaa813-serving-cert\") pod \"console-operator-58897d9998-q2v54\" (UID: \"9dbcf64e-599d-458f-a5cd-92f58deaa813\") " pod="openshift-console-operator/console-operator-58897d9998-q2v54" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523067 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-audit-policies\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523106 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thtmf\" (UniqueName: \"kubernetes.io/projected/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-kube-api-access-thtmf\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523128 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z52cs\" (UniqueName: \"kubernetes.io/projected/5d3409f3-3730-479c-b48b-e3829fba88ae-kube-api-access-z52cs\") pod \"controller-manager-879f6c89f-v47hs\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523151 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w48rv\" (UniqueName: \"kubernetes.io/projected/fbdc74c8-8b96-479b-b06c-637acb1bb68a-kube-api-access-w48rv\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " 
pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523191 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3c1f84bf-fc2e-465a-b3e1-436eb29e50ab-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-8gzc6\" (UID: \"3c1f84bf-fc2e-465a-b3e1-436eb29e50ab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523235 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-node-pullsecrets\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523258 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-encryption-config\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523281 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4d6fz\" (UniqueName: \"kubernetes.io/projected/2e221469-9c10-4c08-925b-a4e4e4c3d208-kube-api-access-4d6fz\") pod \"route-controller-manager-6576b87f9c-tlndc\" (UID: \"2e221469-9c10-4c08-925b-a4e4e4c3d208\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523313 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3455abcd-3a0d-4376-908c-81484c62002a-auth-proxy-config\") pod \"machine-approver-56656f9798-h2qcw\" (UID: \"3455abcd-3a0d-4376-908c-81484c62002a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523340 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac076d6f-30a7-4230-8320-936d709bf640-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-czjn5\" (UID: \"ac076d6f-30a7-4230-8320-936d709bf640\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523366 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9dbcf64e-599d-458f-a5cd-92f58deaa813-config\") pod \"console-operator-58897d9998-q2v54\" (UID: \"9dbcf64e-599d-458f-a5cd-92f58deaa813\") " pod="openshift-console-operator/console-operator-58897d9998-q2v54" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523390 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d3409f3-3730-479c-b48b-e3829fba88ae-serving-cert\") pod \"controller-manager-879f6c89f-v47hs\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523413 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7f1e1cca-4214-4752-8bc4-1972df6de928-images\") pod \"machine-api-operator-5694c8668f-crxzj\" (UID: \"7f1e1cca-4214-4752-8bc4-1972df6de928\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523435 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-etcd-client\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523455 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3455abcd-3a0d-4376-908c-81484c62002a-config\") pod \"machine-approver-56656f9798-h2qcw\" (UID: \"3455abcd-3a0d-4376-908c-81484c62002a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523476 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e8f5b2a0-33a0-46b8-9f75-6d732599e8e1-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4gxkn\" (UID: \"e8f5b2a0-33a0-46b8-9f75-6d732599e8e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4gxkn" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523531 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgm59\" (UniqueName: \"kubernetes.io/projected/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-kube-api-access-pgm59\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523564 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsgkn\" (UniqueName: \"kubernetes.io/projected/ff1e4a4f-2591-46a6-b478-7cef472306c3-kube-api-access-xsgkn\") pod \"openshift-config-operator-7777fb866f-8l25g\" (UID: \"ff1e4a4f-2591-46a6-b478-7cef472306c3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8l25g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523589 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-audit-dir\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.523612 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/9211752e-9c0b-43ea-9c4d-5d91fcb472db-stats-auth\") pod \"router-default-5444994796-jzm89\" (UID: \"9211752e-9c0b-43ea-9c4d-5d91fcb472db\") " pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:19 
crc kubenswrapper[4690]: I1211 14:15:19.523636 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/08650951-b99f-42e0-8321-9b1b9560f1cb-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-4lfzc\" (UID: \"08650951-b99f-42e0-8321-9b1b9560f1cb\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4lfzc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.524091 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-cbsq8"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.524668 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.525166 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.525344 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cbsq8" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.525897 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.526939 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.527033 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-8klv5"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.528239 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-v47hs"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.529281 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-crxzj"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.530289 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-r56mp"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.531898 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-clbf8"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.532722 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-clbf8" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.534497 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ktwn7"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.535587 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-q2v54"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.535689 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.538774 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4gxkn"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.539850 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-4b7r8"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.540854 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-t9jhb"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.541519 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.541621 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-t9jhb" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.541921 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-4b7r8" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.542199 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4lfzc"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.543139 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zl7lm"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.544049 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5n4lj"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.544980 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-77vtz"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.545938 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-c5zwr"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.546860 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nmql7"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.548058 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-rb4j9"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.549248 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-vl4d4"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.549939 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-zsqv7"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.550097 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.551245 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r2jbn"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.552001 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.553051 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.553718 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.554654 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.555719 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9wx9g"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.557528 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6mnd7"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.558209 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.558356 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9h2v4"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.560070 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.561669 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.562349 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.563378 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ktwn7"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.564541 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6wqbb"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.565688 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-z8tth"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.566695 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-t9jhb"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.567580 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-sgjk8"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.568808 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-cbsq8"] Dec 11 14:15:19 crc kubenswrapper[4690]: 
I1211 14:15:19.570252 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-w2qz9"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.572188 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-w2qz9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.572615 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-4b7r8"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.573701 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-clbf8"] Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.580434 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.597279 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.617116 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624475 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/4ecd7785-d164-4998-a361-031cd179f164-etcd-ca\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624519 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwdjn\" (UniqueName: \"kubernetes.io/projected/08650951-b99f-42e0-8321-9b1b9560f1cb-kube-api-access-rwdjn\") pod \"kube-storage-version-migrator-operator-b67b599dd-4lfzc\" (UID: \"08650951-b99f-42e0-8321-9b1b9560f1cb\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4lfzc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624551 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624583 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mt6mg\" (UniqueName: \"kubernetes.io/projected/7f1e1cca-4214-4752-8bc4-1972df6de928-kube-api-access-mt6mg\") pod \"machine-api-operator-5694c8668f-crxzj\" (UID: \"7f1e1cca-4214-4752-8bc4-1972df6de928\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624604 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5c5595c-6c47-401a-8067-a02411ef722b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zl7lm\" (UID: \"f5c5595c-6c47-401a-8067-a02411ef722b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zl7lm" 
Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624627 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-etcd-serving-ca\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624646 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-audit-dir\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624668 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ecd7785-d164-4998-a361-031cd179f164-config\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624687 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5c5595c-6c47-401a-8067-a02411ef722b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zl7lm\" (UID: \"f5c5595c-6c47-401a-8067-a02411ef722b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zl7lm" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624711 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e221469-9c10-4c08-925b-a4e4e4c3d208-serving-cert\") pod \"route-controller-manager-6576b87f9c-tlndc\" (UID: \"2e221469-9c10-4c08-925b-a4e4e4c3d208\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624734 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dc5x\" (UniqueName: \"kubernetes.io/projected/9211752e-9c0b-43ea-9c4d-5d91fcb472db-kube-api-access-8dc5x\") pod \"router-default-5444994796-jzm89\" (UID: \"9211752e-9c0b-43ea-9c4d-5d91fcb472db\") " pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624757 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f5c5595c-6c47-401a-8067-a02411ef722b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zl7lm\" (UID: \"f5c5595c-6c47-401a-8067-a02411ef722b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zl7lm" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624780 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/ff1e4a4f-2591-46a6-b478-7cef472306c3-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8l25g\" (UID: \"ff1e4a4f-2591-46a6-b478-7cef472306c3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8l25g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624800 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624822 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3414e867-9935-42f6-9de9-853252ee06d3-config\") pod \"openshift-apiserver-operator-796bbdcf4f-bfp74\" (UID: \"3414e867-9935-42f6-9de9-853252ee06d3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bfp74" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624846 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ssp4w\" (UniqueName: \"kubernetes.io/projected/4ac34e73-25e1-449d-9c43-e6bc5054ede8-kube-api-access-ssp4w\") pod \"downloads-7954f5f757-8klv5\" (UID: \"4ac34e73-25e1-449d-9c43-e6bc5054ede8\") " pod="openshift-console/downloads-7954f5f757-8klv5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624869 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624899 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6568e808-b0c8-4d3a-b454-51b9c810f2a3-profile-collector-cert\") pod \"catalog-operator-68c6474976-j2v2w\" (UID: \"6568e808-b0c8-4d3a-b454-51b9c810f2a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.624937 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sd57\" (UniqueName: \"kubernetes.io/projected/3455abcd-3a0d-4376-908c-81484c62002a-kube-api-access-2sd57\") pod \"machine-approver-56656f9798-h2qcw\" (UID: \"3455abcd-3a0d-4376-908c-81484c62002a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.625004 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-config\") pod \"controller-manager-879f6c89f-v47hs\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.625030 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/4ecd7785-d164-4998-a361-031cd179f164-etcd-service-ca\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.625032 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-audit-dir\") pod 
\"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.625051 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fbdc74c8-8b96-479b-b06c-637acb1bb68a-console-serving-cert\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.625077 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-config\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.625386 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/4ecd7785-d164-4998-a361-031cd179f164-etcd-ca\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.625623 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ecd7785-d164-4998-a361-031cd179f164-config\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.625623 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-etcd-serving-ca\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.626300 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fbdc74c8-8b96-479b-b06c-637acb1bb68a-trusted-ca-bundle\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.626443 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfghh\" (UniqueName: \"kubernetes.io/projected/3c1f84bf-fc2e-465a-b3e1-436eb29e50ab-kube-api-access-hfghh\") pod \"machine-config-controller-84d6567774-8gzc6\" (UID: \"3c1f84bf-fc2e-465a-b3e1-436eb29e50ab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.626451 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3414e867-9935-42f6-9de9-853252ee06d3-config\") pod \"openshift-apiserver-operator-796bbdcf4f-bfp74\" (UID: \"3414e867-9935-42f6-9de9-853252ee06d3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bfp74" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.626479 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/4ecd7785-d164-4998-a361-031cd179f164-etcd-service-ca\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.626983 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-image-import-ca\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.627027 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrr9r\" (UniqueName: \"kubernetes.io/projected/ac076d6f-30a7-4230-8320-936d709bf640-kube-api-access-wrr9r\") pod \"authentication-operator-69f744f599-czjn5\" (UID: \"ac076d6f-30a7-4230-8320-936d709bf640\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.627531 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-config\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.627688 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2e221469-9c10-4c08-925b-a4e4e4c3d208-client-ca\") pod \"route-controller-manager-6576b87f9c-tlndc\" (UID: \"2e221469-9c10-4c08-925b-a4e4e4c3d208\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.627724 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-zrnhb\" (UID: \"24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.627699 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/ff1e4a4f-2591-46a6-b478-7cef472306c3-available-featuregates\") pod \"openshift-config-operator-7777fb866f-8l25g\" (UID: \"ff1e4a4f-2591-46a6-b478-7cef472306c3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8l25g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.628063 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac076d6f-30a7-4230-8320-936d709bf640-config\") pod \"authentication-operator-69f744f599-czjn5\" (UID: \"ac076d6f-30a7-4230-8320-936d709bf640\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.628100 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-config\") pod \"controller-manager-879f6c89f-v47hs\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.628269 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-v47hs\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.628704 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fbdc74c8-8b96-479b-b06c-637acb1bb68a-trusted-ca-bundle\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.628704 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-image-import-ca\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.628787 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-audit\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.628815 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/7f1e1cca-4214-4752-8bc4-1972df6de928-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-crxzj\" (UID: \"7f1e1cca-4214-4752-8bc4-1972df6de928\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.628843 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9dbcf64e-599d-458f-a5cd-92f58deaa813-serving-cert\") pod \"console-operator-58897d9998-q2v54\" (UID: \"9dbcf64e-599d-458f-a5cd-92f58deaa813\") " pod="openshift-console-operator/console-operator-58897d9998-q2v54" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.628866 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-audit-policies\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.628895 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w48rv\" (UniqueName: \"kubernetes.io/projected/fbdc74c8-8b96-479b-b06c-637acb1bb68a-kube-api-access-w48rv\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.628920 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/3c1f84bf-fc2e-465a-b3e1-436eb29e50ab-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-8gzc6\" (UID: \"3c1f84bf-fc2e-465a-b3e1-436eb29e50ab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.628976 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thtmf\" (UniqueName: \"kubernetes.io/projected/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-kube-api-access-thtmf\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629002 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z52cs\" (UniqueName: \"kubernetes.io/projected/5d3409f3-3730-479c-b48b-e3829fba88ae-kube-api-access-z52cs\") pod \"controller-manager-879f6c89f-v47hs\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629025 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4d6fz\" (UniqueName: \"kubernetes.io/projected/2e221469-9c10-4c08-925b-a4e4e4c3d208-kube-api-access-4d6fz\") pod \"route-controller-manager-6576b87f9c-tlndc\" (UID: \"2e221469-9c10-4c08-925b-a4e4e4c3d208\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629046 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3455abcd-3a0d-4376-908c-81484c62002a-auth-proxy-config\") pod \"machine-approver-56656f9798-h2qcw\" (UID: \"3455abcd-3a0d-4376-908c-81484c62002a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629070 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-node-pullsecrets\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629091 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-encryption-config\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629096 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ac076d6f-30a7-4230-8320-936d709bf640-config\") pod \"authentication-operator-69f744f599-czjn5\" (UID: \"ac076d6f-30a7-4230-8320-936d709bf640\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629111 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac076d6f-30a7-4230-8320-936d709bf640-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-czjn5\" (UID: 
\"ac076d6f-30a7-4230-8320-936d709bf640\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629147 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9dbcf64e-599d-458f-a5cd-92f58deaa813-config\") pod \"console-operator-58897d9998-q2v54\" (UID: \"9dbcf64e-599d-458f-a5cd-92f58deaa813\") " pod="openshift-console-operator/console-operator-58897d9998-q2v54" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629171 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d3409f3-3730-479c-b48b-e3829fba88ae-serving-cert\") pod \"controller-manager-879f6c89f-v47hs\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629195 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7f1e1cca-4214-4752-8bc4-1972df6de928-images\") pod \"machine-api-operator-5694c8668f-crxzj\" (UID: \"7f1e1cca-4214-4752-8bc4-1972df6de928\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629223 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e8f5b2a0-33a0-46b8-9f75-6d732599e8e1-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4gxkn\" (UID: \"e8f5b2a0-33a0-46b8-9f75-6d732599e8e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4gxkn" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629250 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgm59\" (UniqueName: \"kubernetes.io/projected/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-kube-api-access-pgm59\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629277 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsgkn\" (UniqueName: \"kubernetes.io/projected/ff1e4a4f-2591-46a6-b478-7cef472306c3-kube-api-access-xsgkn\") pod \"openshift-config-operator-7777fb866f-8l25g\" (UID: \"ff1e4a4f-2591-46a6-b478-7cef472306c3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8l25g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629298 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-etcd-client\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629322 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3455abcd-3a0d-4376-908c-81484c62002a-config\") pod \"machine-approver-56656f9798-h2qcw\" (UID: \"3455abcd-3a0d-4376-908c-81484c62002a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629344 4690 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-audit-dir\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629364 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/9211752e-9c0b-43ea-9c4d-5d91fcb472db-stats-auth\") pod \"router-default-5444994796-jzm89\" (UID: \"9211752e-9c0b-43ea-9c4d-5d91fcb472db\") " pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629390 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/08650951-b99f-42e0-8321-9b1b9560f1cb-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-4lfzc\" (UID: \"08650951-b99f-42e0-8321-9b1b9560f1cb\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4lfzc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629428 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-zrnhb\" (UID: \"24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629454 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9211752e-9c0b-43ea-9c4d-5d91fcb472db-service-ca-bundle\") pod \"router-default-5444994796-jzm89\" (UID: \"9211752e-9c0b-43ea-9c4d-5d91fcb472db\") " pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629484 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ecd7785-d164-4998-a361-031cd179f164-serving-cert\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629504 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f1e1cca-4214-4752-8bc4-1972df6de928-config\") pod \"machine-api-operator-5694c8668f-crxzj\" (UID: \"7f1e1cca-4214-4752-8bc4-1972df6de928\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629529 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-etcd-client\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629548 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr54t\" (UniqueName: \"kubernetes.io/projected/24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab-kube-api-access-pr54t\") pod \"cluster-image-registry-operator-dc59b4c8b-zrnhb\" (UID: 
\"24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629572 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-trusted-ca-bundle\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629596 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.629818 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac076d6f-30a7-4230-8320-936d709bf640-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-czjn5\" (UID: \"ac076d6f-30a7-4230-8320-936d709bf640\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.630067 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2e221469-9c10-4c08-925b-a4e4e4c3d208-client-ca\") pod \"route-controller-manager-6576b87f9c-tlndc\" (UID: \"2e221469-9c10-4c08-925b-a4e4e4c3d208\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.630131 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-audit-dir\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.630326 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.630430 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.630587 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e221469-9c10-4c08-925b-a4e4e4c3d208-serving-cert\") pod \"route-controller-manager-6576b87f9c-tlndc\" (UID: \"2e221469-9c10-4c08-925b-a4e4e4c3d208\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.630606 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9211752e-9c0b-43ea-9c4d-5d91fcb472db-metrics-certs\") pod \"router-default-5444994796-jzm89\" (UID: \"9211752e-9c0b-43ea-9c4d-5d91fcb472db\") " pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.635158 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/57d5b2c5-8c4a-410b-9acf-94852865a8d6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-9h2v4\" (UID: \"57d5b2c5-8c4a-410b-9acf-94852865a8d6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9h2v4" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.635488 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff1e4a4f-2591-46a6-b478-7cef472306c3-serving-cert\") pod \"openshift-config-operator-7777fb866f-8l25g\" (UID: \"ff1e4a4f-2591-46a6-b478-7cef472306c3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8l25g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.635507 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.631610 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/fbdc74c8-8b96-479b-b06c-637acb1bb68a-console-serving-cert\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.632037 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-v47hs\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.632052 4690 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-audit\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.632304 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3c1f84bf-fc2e-465a-b3e1-436eb29e50ab-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-8gzc6\" (UID: \"3c1f84bf-fc2e-465a-b3e1-436eb29e50ab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.632418 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9dbcf64e-599d-458f-a5cd-92f58deaa813-config\") pod \"console-operator-58897d9998-q2v54\" (UID: \"9dbcf64e-599d-458f-a5cd-92f58deaa813\") " pod="openshift-console-operator/console-operator-58897d9998-q2v54" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.632462 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.631431 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9dbcf64e-599d-458f-a5cd-92f58deaa813-serving-cert\") pod \"console-operator-58897d9998-q2v54\" (UID: \"9dbcf64e-599d-458f-a5cd-92f58deaa813\") " pod="openshift-console-operator/console-operator-58897d9998-q2v54" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.632895 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.632897 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-zrnhb\" (UID: \"24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.632939 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-node-pullsecrets\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.633643 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7f1e1cca-4214-4752-8bc4-1972df6de928-images\") pod \"machine-api-operator-5694c8668f-crxzj\" (UID: \"7f1e1cca-4214-4752-8bc4-1972df6de928\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.633806 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-trusted-ca-bundle\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.634028 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d3409f3-3730-479c-b48b-e3829fba88ae-serving-cert\") pod \"controller-manager-879f6c89f-v47hs\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.634224 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-audit-policies\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.634318 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3455abcd-3a0d-4376-908c-81484c62002a-config\") pod \"machine-approver-56656f9798-h2qcw\" (UID: \"3455abcd-3a0d-4376-908c-81484c62002a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.634488 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.631080 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.635434 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-encryption-config\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.635526 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/3455abcd-3a0d-4376-908c-81484c62002a-machine-approver-tls\") pod \"machine-approver-56656f9798-h2qcw\" (UID: \"3455abcd-3a0d-4376-908c-81484c62002a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.635880 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mn7c\" (UniqueName: \"kubernetes.io/projected/e8f5b2a0-33a0-46b8-9f75-6d732599e8e1-kube-api-access-7mn7c\") pod \"cluster-samples-operator-665b6dd947-4gxkn\" (UID: \"e8f5b2a0-33a0-46b8-9f75-6d732599e8e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4gxkn" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.635900 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fbdc74c8-8b96-479b-b06c-637acb1bb68a-console-config\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.635918 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nn6kq\" (UniqueName: \"kubernetes.io/projected/f0fc8c25-f7b2-4f7b-8ea7-323e91557a24-kube-api-access-nn6kq\") pod \"migrator-59844c95c7-6mnd7\" (UID: \"f0fc8c25-f7b2-4f7b-8ea7-323e91557a24\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mnd7" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.635938 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d5b2c5-8c4a-410b-9acf-94852865a8d6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-9h2v4\" (UID: \"57d5b2c5-8c4a-410b-9acf-94852865a8d6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9h2v4" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.635975 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-audit-dir\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.635993 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9dbcf64e-599d-458f-a5cd-92f58deaa813-trusted-ca\") pod \"console-operator-58897d9998-q2v54\" (UID: \"9dbcf64e-599d-458f-a5cd-92f58deaa813\") " 
pod="openshift-console-operator/console-operator-58897d9998-q2v54" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636010 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636011 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/e8f5b2a0-33a0-46b8-9f75-6d732599e8e1-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-4gxkn\" (UID: \"e8f5b2a0-33a0-46b8-9f75-6d732599e8e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4gxkn" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636027 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ac076d6f-30a7-4230-8320-936d709bf640-service-ca-bundle\") pod \"authentication-operator-69f744f599-czjn5\" (UID: \"ac076d6f-30a7-4230-8320-936d709bf640\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636044 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636063 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08650951-b99f-42e0-8321-9b1b9560f1cb-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-4lfzc\" (UID: \"08650951-b99f-42e0-8321-9b1b9560f1cb\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4lfzc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.632819 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3455abcd-3a0d-4376-908c-81484c62002a-auth-proxy-config\") pod \"machine-approver-56656f9798-h2qcw\" (UID: \"3455abcd-3a0d-4376-908c-81484c62002a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636081 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-encryption-config\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636194 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636229 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzkr7\" (UniqueName: \"kubernetes.io/projected/4ecd7785-d164-4998-a361-031cd179f164-kube-api-access-kzkr7\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636255 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3414e867-9935-42f6-9de9-853252ee06d3-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-bfp74\" (UID: \"3414e867-9935-42f6-9de9-853252ee06d3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bfp74" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636279 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-serving-cert\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636300 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636320 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-client-ca\") pod \"controller-manager-879f6c89f-v47hs\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636337 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636353 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/9211752e-9c0b-43ea-9c4d-5d91fcb472db-default-certificate\") pod \"router-default-5444994796-jzm89\" (UID: \"9211752e-9c0b-43ea-9c4d-5d91fcb472db\") " pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636476 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3c1f84bf-fc2e-465a-b3e1-436eb29e50ab-proxy-tls\") pod \"machine-config-controller-84d6567774-8gzc6\" (UID: \"3c1f84bf-fc2e-465a-b3e1-436eb29e50ab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636495 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-6jk66\" (UniqueName: \"kubernetes.io/projected/9dbcf64e-599d-458f-a5cd-92f58deaa813-kube-api-access-6jk66\") pod \"console-operator-58897d9998-q2v54\" (UID: \"9dbcf64e-599d-458f-a5cd-92f58deaa813\") " pod="openshift-console-operator/console-operator-58897d9998-q2v54" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636515 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636533 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fbdc74c8-8b96-479b-b06c-637acb1bb68a-oauth-serving-cert\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636571 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6568e808-b0c8-4d3a-b454-51b9c810f2a3-srv-cert\") pod \"catalog-operator-68c6474976-j2v2w\" (UID: \"6568e808-b0c8-4d3a-b454-51b9c810f2a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636574 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636595 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fbdc74c8-8b96-479b-b06c-637acb1bb68a-console-oauth-config\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636616 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nq6fd\" (UniqueName: \"kubernetes.io/projected/6568e808-b0c8-4d3a-b454-51b9c810f2a3-kube-api-access-nq6fd\") pod \"catalog-operator-68c6474976-j2v2w\" (UID: \"6568e808-b0c8-4d3a-b454-51b9c810f2a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636640 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-serving-cert\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636657 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac076d6f-30a7-4230-8320-936d709bf640-serving-cert\") pod \"authentication-operator-69f744f599-czjn5\" (UID: \"ac076d6f-30a7-4230-8320-936d709bf640\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636677 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-zrnhb\" (UID: \"24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636711 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bnslm\" (UniqueName: \"kubernetes.io/projected/3414e867-9935-42f6-9de9-853252ee06d3-kube-api-access-bnslm\") pod \"openshift-apiserver-operator-796bbdcf4f-bfp74\" (UID: \"3414e867-9935-42f6-9de9-853252ee06d3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bfp74" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636746 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fbdc74c8-8b96-479b-b06c-637acb1bb68a-service-ca\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636769 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7c2d\" (UniqueName: \"kubernetes.io/projected/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-kube-api-access-f7c2d\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636785 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-audit-policies\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636801 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636844 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e221469-9c10-4c08-925b-a4e4e4c3d208-config\") pod \"route-controller-manager-6576b87f9c-tlndc\" (UID: \"2e221469-9c10-4c08-925b-a4e4e4c3d208\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636866 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 
14:15:19.636881 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4ecd7785-d164-4998-a361-031cd179f164-etcd-client\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636898 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcx4v\" (UniqueName: \"kubernetes.io/projected/57d5b2c5-8c4a-410b-9acf-94852865a8d6-kube-api-access-vcx4v\") pod \"openshift-controller-manager-operator-756b6f6bc6-9h2v4\" (UID: \"57d5b2c5-8c4a-410b-9acf-94852865a8d6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9h2v4" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.637002 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/fbdc74c8-8b96-479b-b06c-637acb1bb68a-console-config\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.637606 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/7f1e1cca-4214-4752-8bc4-1972df6de928-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-crxzj\" (UID: \"7f1e1cca-4214-4752-8bc4-1972df6de928\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.637648 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/57d5b2c5-8c4a-410b-9acf-94852865a8d6-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-9h2v4\" (UID: \"57d5b2c5-8c4a-410b-9acf-94852865a8d6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9h2v4" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.631908 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f1e1cca-4214-4752-8bc4-1972df6de928-config\") pod \"machine-api-operator-5694c8668f-crxzj\" (UID: \"7f1e1cca-4214-4752-8bc4-1972df6de928\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.638023 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-encryption-config\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.638042 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.638239 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/ac076d6f-30a7-4230-8320-936d709bf640-service-ca-bundle\") pod \"authentication-operator-69f744f599-czjn5\" (UID: \"ac076d6f-30a7-4230-8320-936d709bf640\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.638461 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/fbdc74c8-8b96-479b-b06c-637acb1bb68a-oauth-serving-cert\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.638473 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/3455abcd-3a0d-4376-908c-81484c62002a-machine-approver-tls\") pod \"machine-approver-56656f9798-h2qcw\" (UID: \"3455abcd-3a0d-4376-908c-81484c62002a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.638517 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.638633 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/fbdc74c8-8b96-479b-b06c-637acb1bb68a-service-ca\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.639264 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9dbcf64e-599d-458f-a5cd-92f58deaa813-trusted-ca\") pod \"console-operator-58897d9998-q2v54\" (UID: \"9dbcf64e-599d-458f-a5cd-92f58deaa813\") " pod="openshift-console-operator/console-operator-58897d9998-q2v54" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.639489 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e221469-9c10-4c08-925b-a4e4e4c3d208-config\") pod \"route-controller-manager-6576b87f9c-tlndc\" (UID: \"2e221469-9c10-4c08-925b-a4e4e4c3d208\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.639657 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.636298 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-audit-dir\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 
14:15:19.640238 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-audit-policies\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.640364 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.640775 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-client-ca\") pod \"controller-manager-879f6c89f-v47hs\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.641199 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ecd7785-d164-4998-a361-031cd179f164-serving-cert\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.641555 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.641708 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.641740 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-etcd-client\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.641764 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ac076d6f-30a7-4230-8320-936d709bf640-serving-cert\") pod \"authentication-operator-69f744f599-czjn5\" (UID: \"ac076d6f-30a7-4230-8320-936d709bf640\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.641764 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-etcd-client\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.642027 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.642150 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.642806 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff1e4a4f-2591-46a6-b478-7cef472306c3-serving-cert\") pod \"openshift-config-operator-7777fb866f-8l25g\" (UID: \"ff1e4a4f-2591-46a6-b478-7cef472306c3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8l25g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.643479 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3414e867-9935-42f6-9de9-853252ee06d3-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-bfp74\" (UID: \"3414e867-9935-42f6-9de9-853252ee06d3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bfp74" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.643629 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.643763 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-zrnhb\" (UID: \"24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.644104 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-serving-cert\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.644513 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-serving-cert\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.645015 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/fbdc74c8-8b96-479b-b06c-637acb1bb68a-console-oauth-config\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " 
pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.645150 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4ecd7785-d164-4998-a361-031cd179f164-etcd-client\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.645477 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.658025 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.677475 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.690139 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/57d5b2c5-8c4a-410b-9acf-94852865a8d6-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-9h2v4\" (UID: \"57d5b2c5-8c4a-410b-9acf-94852865a8d6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9h2v4" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.698076 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.718539 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.738085 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.757386 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.783712 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.797619 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.817987 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.837267 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.857406 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.877581 4690 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.897901 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.917245 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.937052 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.956654 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.977430 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 11 14:15:19 crc kubenswrapper[4690]: I1211 14:15:19.998070 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.017651 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.037055 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.057181 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.078284 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.093300 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/9211752e-9c0b-43ea-9c4d-5d91fcb472db-default-certificate\") pod \"router-default-5444994796-jzm89\" (UID: \"9211752e-9c0b-43ea-9c4d-5d91fcb472db\") " pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.097366 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.108533 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/9211752e-9c0b-43ea-9c4d-5d91fcb472db-stats-auth\") pod \"router-default-5444994796-jzm89\" (UID: \"9211752e-9c0b-43ea-9c4d-5d91fcb472db\") " pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.117548 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.127264 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9211752e-9c0b-43ea-9c4d-5d91fcb472db-metrics-certs\") pod \"router-default-5444994796-jzm89\" (UID: \"9211752e-9c0b-43ea-9c4d-5d91fcb472db\") " pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:20 crc 
kubenswrapper[4690]: I1211 14:15:20.138112 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.143017 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs\") pod \"network-metrics-daemon-r8sd9\" (UID: \"0cbb05eb-6650-45bc-ae3f-d29df5940583\") " pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.143942 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9211752e-9c0b-43ea-9c4d-5d91fcb472db-service-ca-bundle\") pod \"router-default-5444994796-jzm89\" (UID: \"9211752e-9c0b-43ea-9c4d-5d91fcb472db\") " pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.157034 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.178326 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.192504 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/3c1f84bf-fc2e-465a-b3e1-436eb29e50ab-proxy-tls\") pod \"machine-config-controller-84d6567774-8gzc6\" (UID: \"3c1f84bf-fc2e-465a-b3e1-436eb29e50ab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.198240 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.218276 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.238321 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.257513 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.265620 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/08650951-b99f-42e0-8321-9b1b9560f1cb-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-4lfzc\" (UID: \"08650951-b99f-42e0-8321-9b1b9560f1cb\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4lfzc" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.278109 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.297592 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.301284 4690 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/08650951-b99f-42e0-8321-9b1b9560f1cb-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-4lfzc\" (UID: \"08650951-b99f-42e0-8321-9b1b9560f1cb\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4lfzc" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.317893 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.338557 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.358502 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.369593 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5c5595c-6c47-401a-8067-a02411ef722b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zl7lm\" (UID: \"f5c5595c-6c47-401a-8067-a02411ef722b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zl7lm" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.377493 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.386811 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5c5595c-6c47-401a-8067-a02411ef722b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zl7lm\" (UID: \"f5c5595c-6c47-401a-8067-a02411ef722b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zl7lm" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.397548 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.417528 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.437852 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.442214 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6568e808-b0c8-4d3a-b454-51b9c810f2a3-srv-cert\") pod \"catalog-operator-68c6474976-j2v2w\" (UID: \"6568e808-b0c8-4d3a-b454-51b9c810f2a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.456028 4690 request.go:700] Waited for 1.000428869s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/secrets?fieldSelector=metadata.name%3Dpprof-cert&limit=500&resourceVersion=0 Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.457870 4690 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.470242 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6568e808-b0c8-4d3a-b454-51b9c810f2a3-profile-collector-cert\") pod \"catalog-operator-68c6474976-j2v2w\" (UID: \"6568e808-b0c8-4d3a-b454-51b9c810f2a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.478411 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.498057 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.517326 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.538313 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.557868 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.577991 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.597454 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.617568 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.632351 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.657648 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.678494 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.706402 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.717925 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.737933 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.758630 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.778133 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.797554 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.817808 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.837018 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.856614 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.877799 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.899273 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.917301 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.937990 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.958164 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.977235 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 11 14:15:20 crc kubenswrapper[4690]: I1211 14:15:20.997208 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.017232 4690 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.037383 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.057601 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.076840 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.096720 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.118229 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.138065 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 11 14:15:21 crc kubenswrapper[4690]: E1211 14:15:21.144002 4690 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: failed to sync secret cache: timed out waiting for the condition Dec 11 14:15:21 crc kubenswrapper[4690]: E1211 14:15:21.144069 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs podName:0cbb05eb-6650-45bc-ae3f-d29df5940583 nodeName:}" failed. No retries permitted until 2025-12-11 14:15:37.144050182 +0000 UTC m=+68.759451825 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs") pod "network-metrics-daemon-r8sd9" (UID: "0cbb05eb-6650-45bc-ae3f-d29df5940583") : failed to sync secret cache: timed out waiting for the condition Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.158567 4690 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.198517 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.218295 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.237103 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.257507 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.278016 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.298312 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.317289 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.338153 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-sysctl-allowlist" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.358544 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.378098 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.398389 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.435671 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwdjn\" (UniqueName: \"kubernetes.io/projected/08650951-b99f-42e0-8321-9b1b9560f1cb-kube-api-access-rwdjn\") pod \"kube-storage-version-migrator-operator-b67b599dd-4lfzc\" (UID: \"08650951-b99f-42e0-8321-9b1b9560f1cb\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4lfzc" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.454678 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sd57\" (UniqueName: \"kubernetes.io/projected/3455abcd-3a0d-4376-908c-81484c62002a-kube-api-access-2sd57\") pod \"machine-approver-56656f9798-h2qcw\" (UID: \"3455abcd-3a0d-4376-908c-81484c62002a\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.456515 4690 request.go:700] Waited for 1.830212671s due to client-side throttling, not 
priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress/serviceaccounts/router/token Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.473191 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dc5x\" (UniqueName: \"kubernetes.io/projected/9211752e-9c0b-43ea-9c4d-5d91fcb472db-kube-api-access-8dc5x\") pod \"router-default-5444994796-jzm89\" (UID: \"9211752e-9c0b-43ea-9c4d-5d91fcb472db\") " pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.484030 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4lfzc" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.492843 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ssp4w\" (UniqueName: \"kubernetes.io/projected/4ac34e73-25e1-449d-9c43-e6bc5054ede8-kube-api-access-ssp4w\") pod \"downloads-7954f5f757-8klv5\" (UID: \"4ac34e73-25e1-449d-9c43-e6bc5054ede8\") " pod="openshift-console/downloads-7954f5f757-8klv5" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.512327 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f5c5595c-6c47-401a-8067-a02411ef722b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zl7lm\" (UID: \"f5c5595c-6c47-401a-8067-a02411ef722b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zl7lm" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.533060 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrr9r\" (UniqueName: \"kubernetes.io/projected/ac076d6f-30a7-4230-8320-936d709bf640-kube-api-access-wrr9r\") pod \"authentication-operator-69f744f599-czjn5\" (UID: \"ac076d6f-30a7-4230-8320-936d709bf640\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.533598 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.551795 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfghh\" (UniqueName: \"kubernetes.io/projected/3c1f84bf-fc2e-465a-b3e1-436eb29e50ab-kube-api-access-hfghh\") pod \"machine-config-controller-84d6567774-8gzc6\" (UID: \"3c1f84bf-fc2e-465a-b3e1-436eb29e50ab\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.574426 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-zrnhb\" (UID: \"24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.592083 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z52cs\" (UniqueName: \"kubernetes.io/projected/5d3409f3-3730-479c-b48b-e3829fba88ae-kube-api-access-z52cs\") pod \"controller-manager-879f6c89f-v47hs\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.598251 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.607013 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-8klv5" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.616135 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.636034 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w48rv\" (UniqueName: \"kubernetes.io/projected/fbdc74c8-8b96-479b-b06c-637acb1bb68a-kube-api-access-w48rv\") pod \"console-f9d7485db-rb4j9\" (UID: \"fbdc74c8-8b96-479b-b06c-637acb1bb68a\") " pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.661289 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgm59\" (UniqueName: \"kubernetes.io/projected/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-kube-api-access-pgm59\") pod \"oauth-openshift-558db77b4-9wx9g\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.662095 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.670804 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4lfzc"] Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.675300 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thtmf\" (UniqueName: \"kubernetes.io/projected/7406c3ff-2d22-41c7-ae0b-fa8180cc5ede-kube-api-access-thtmf\") pod \"apiserver-7bbb656c7d-j9qmk\" (UID: \"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.692978 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsgkn\" (UniqueName: \"kubernetes.io/projected/ff1e4a4f-2591-46a6-b478-7cef472306c3-kube-api-access-xsgkn\") pod \"openshift-config-operator-7777fb866f-8l25g\" (UID: \"ff1e4a4f-2591-46a6-b478-7cef472306c3\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-8l25g" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.713336 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mt6mg\" (UniqueName: \"kubernetes.io/projected/7f1e1cca-4214-4752-8bc4-1972df6de928-kube-api-access-mt6mg\") pod \"machine-api-operator-5694c8668f-crxzj\" (UID: \"7f1e1cca-4214-4752-8bc4-1972df6de928\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.725448 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.736693 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4d6fz\" (UniqueName: \"kubernetes.io/projected/2e221469-9c10-4c08-925b-a4e4e4c3d208-kube-api-access-4d6fz\") pod \"route-controller-manager-6576b87f9c-tlndc\" (UID: \"2e221469-9c10-4c08-925b-a4e4e4c3d208\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.739306 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.756576 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.761132 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.775191 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.776742 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr54t\" (UniqueName: \"kubernetes.io/projected/24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab-kube-api-access-pr54t\") pod \"cluster-image-registry-operator-dc59b4c8b-zrnhb\" (UID: \"24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.779670 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.783556 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.789919 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zl7lm" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.799366 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.808975 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.818453 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.851401 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8l25g" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.858234 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-8klv5"] Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.872581 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mn7c\" (UniqueName: \"kubernetes.io/projected/e8f5b2a0-33a0-46b8-9f75-6d732599e8e1-kube-api-access-7mn7c\") pod \"cluster-samples-operator-665b6dd947-4gxkn\" (UID: \"e8f5b2a0-33a0-46b8-9f75-6d732599e8e1\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4gxkn" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.889658 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nn6kq\" (UniqueName: \"kubernetes.io/projected/f0fc8c25-f7b2-4f7b-8ea7-323e91557a24-kube-api-access-nn6kq\") pod \"migrator-59844c95c7-6mnd7\" (UID: \"f0fc8c25-f7b2-4f7b-8ea7-323e91557a24\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mnd7" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.907141 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcx4v\" (UniqueName: \"kubernetes.io/projected/57d5b2c5-8c4a-410b-9acf-94852865a8d6-kube-api-access-vcx4v\") pod \"openshift-controller-manager-operator-756b6f6bc6-9h2v4\" (UID: \"57d5b2c5-8c4a-410b-9acf-94852865a8d6\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9h2v4" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.911142 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-v47hs"] Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.925719 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jk66\" (UniqueName: \"kubernetes.io/projected/9dbcf64e-599d-458f-a5cd-92f58deaa813-kube-api-access-6jk66\") pod \"console-operator-58897d9998-q2v54\" (UID: \"9dbcf64e-599d-458f-a5cd-92f58deaa813\") " pod="openshift-console-operator/console-operator-58897d9998-q2v54" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.936229 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-rb4j9"] Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.939223 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.942945 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.949938 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bnslm\" (UniqueName: \"kubernetes.io/projected/3414e867-9935-42f6-9de9-853252ee06d3-kube-api-access-bnslm\") pod \"openshift-apiserver-operator-796bbdcf4f-bfp74\" (UID: \"3414e867-9935-42f6-9de9-853252ee06d3\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bfp74" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.978701 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7c2d\" (UniqueName: \"kubernetes.io/projected/6ddc4574-895b-41fa-a8c9-47c4f303d0d9-kube-api-access-f7c2d\") pod \"apiserver-76f77b778f-r56mp\" (UID: \"6ddc4574-895b-41fa-a8c9-47c4f303d0d9\") " pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:21 crc kubenswrapper[4690]: I1211 14:15:21.982089 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9h2v4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:21.996371 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nq6fd\" (UniqueName: \"kubernetes.io/projected/6568e808-b0c8-4d3a-b454-51b9c810f2a3-kube-api-access-nq6fd\") pod \"catalog-operator-68c6474976-j2v2w\" (UID: \"6568e808-b0c8-4d3a-b454-51b9c810f2a3\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.016028 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzkr7\" (UniqueName: \"kubernetes.io/projected/4ecd7785-d164-4998-a361-031cd179f164-kube-api-access-kzkr7\") pod \"etcd-operator-b45778765-77vtz\" (UID: \"4ecd7785-d164-4998-a361-031cd179f164\") " pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.019411 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk"] Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.031696 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.077606 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bfp74" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.078405 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cb8be15-fc6f-43f0-865e-96da3547155f-config\") pod \"kube-controller-manager-operator-78b949d7b-nmql7\" (UID: \"6cb8be15-fc6f-43f0-865e-96da3547155f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nmql7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.078461 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6f2s\" (UniqueName: \"kubernetes.io/projected/807380a2-3571-4626-b33a-71e79bdbf087-kube-api-access-p6f2s\") pod \"package-server-manager-789f6589d5-sgjk8\" (UID: \"807380a2-3571-4626-b33a-71e79bdbf087\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-sgjk8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.078492 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wm2q7\" (UniqueName: \"kubernetes.io/projected/a3d8ec77-4a18-4938-970d-4d83f834a70e-kube-api-access-wm2q7\") pod \"multus-admission-controller-857f4d67dd-zsqv7\" (UID: \"a3d8ec77-4a18-4938-970d-4d83f834a70e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zsqv7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.078687 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-ca-trust-extracted\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.078725 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-trusted-ca\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.078749 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-installation-pull-secrets\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.078776 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86crh\" (UniqueName: \"kubernetes.io/projected/0fd85a56-df39-46c5-82a5-808a37a3dc8e-kube-api-access-86crh\") pod \"dns-operator-744455d44c-z8tth\" (UID: \"0fd85a56-df39-46c5-82a5-808a37a3dc8e\") " pod="openshift-dns-operator/dns-operator-744455d44c-z8tth" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.078820 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4x8n\" (UniqueName: \"kubernetes.io/projected/ef32fe73-cf65-4729-9f54-5cffddba74e1-kube-api-access-b4x8n\") pod 
\"ingress-operator-5b745b69d9-r5xkr\" (UID: \"ef32fe73-cf65-4729-9f54-5cffddba74e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.078841 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-registry-certificates\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.078902 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f62b5697-8059-4222-b731-2f8188e9a270-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5n4lj\" (UID: \"f62b5697-8059-4222-b731-2f8188e9a270\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5n4lj" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.078927 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltfsg\" (UniqueName: \"kubernetes.io/projected/7b031965-e2b9-4d51-b598-9b8366944ee5-kube-api-access-ltfsg\") pod \"olm-operator-6b444d44fb-b4ml2\" (UID: \"7b031965-e2b9-4d51-b598-9b8366944ee5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.078984 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0fd85a56-df39-46c5-82a5-808a37a3dc8e-metrics-tls\") pod \"dns-operator-744455d44c-z8tth\" (UID: \"0fd85a56-df39-46c5-82a5-808a37a3dc8e\") " pod="openshift-dns-operator/dns-operator-744455d44c-z8tth" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.079010 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c66d4\" (UniqueName: \"kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-kube-api-access-c66d4\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.079083 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-bound-sa-token\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.079114 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6cb8be15-fc6f-43f0-865e-96da3547155f-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-nmql7\" (UID: \"6cb8be15-fc6f-43f0-865e-96da3547155f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nmql7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.079691 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.079759 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/807380a2-3571-4626-b33a-71e79bdbf087-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-sgjk8\" (UID: \"807380a2-3571-4626-b33a-71e79bdbf087\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-sgjk8" Dec 11 14:15:22 crc kubenswrapper[4690]: E1211 14:15:22.079994 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:22.57997916 +0000 UTC m=+54.195380873 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.080032 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6cb8be15-fc6f-43f0-865e-96da3547155f-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-nmql7\" (UID: \"6cb8be15-fc6f-43f0-865e-96da3547155f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nmql7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.080075 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f62b5697-8059-4222-b731-2f8188e9a270-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5n4lj\" (UID: \"f62b5697-8059-4222-b731-2f8188e9a270\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5n4lj" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.080117 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a3d8ec77-4a18-4938-970d-4d83f834a70e-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-zsqv7\" (UID: \"a3d8ec77-4a18-4938-970d-4d83f834a70e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zsqv7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.080142 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7b031965-e2b9-4d51-b598-9b8366944ee5-srv-cert\") pod \"olm-operator-6b444d44fb-b4ml2\" (UID: \"7b031965-e2b9-4d51-b598-9b8366944ee5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.080174 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/ef32fe73-cf65-4729-9f54-5cffddba74e1-metrics-tls\") pod \"ingress-operator-5b745b69d9-r5xkr\" (UID: \"ef32fe73-cf65-4729-9f54-5cffddba74e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.080259 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-registry-tls\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.080399 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7b031965-e2b9-4d51-b598-9b8366944ee5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-b4ml2\" (UID: \"7b031965-e2b9-4d51-b598-9b8366944ee5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.080433 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f62b5697-8059-4222-b731-2f8188e9a270-config\") pod \"kube-apiserver-operator-766d6c64bb-5n4lj\" (UID: \"f62b5697-8059-4222-b731-2f8188e9a270\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5n4lj" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.080734 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ef32fe73-cf65-4729-9f54-5cffddba74e1-trusted-ca\") pod \"ingress-operator-5b745b69d9-r5xkr\" (UID: \"ef32fe73-cf65-4729-9f54-5cffddba74e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.080767 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ef32fe73-cf65-4729-9f54-5cffddba74e1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-r5xkr\" (UID: \"ef32fe73-cf65-4729-9f54-5cffddba74e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.107640 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.123129 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-q2v54" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.129111 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mnd7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.155720 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4gxkn" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.183485 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:22 crc kubenswrapper[4690]: E1211 14:15:22.183663 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:22.683641013 +0000 UTC m=+54.299042666 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.183751 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/080f2e20-451b-4825-89bd-2f35b98158b7-config-volume\") pod \"dns-default-t9jhb\" (UID: \"080f2e20-451b-4825-89bd-2f35b98158b7\") " pod="openshift-dns/dns-default-t9jhb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.183780 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86vrb\" (UniqueName: \"kubernetes.io/projected/7731421f-ecdd-4ff8-aa63-79ae9983425f-kube-api-access-86vrb\") pod \"cni-sysctl-allowlist-ds-vl4d4\" (UID: \"7731421f-ecdd-4ff8-aa63-79ae9983425f\") " pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.183808 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7731421f-ecdd-4ff8-aa63-79ae9983425f-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-vl4d4\" (UID: \"7731421f-ecdd-4ff8-aa63-79ae9983425f\") " pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.183822 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18-config\") pod \"service-ca-operator-777779d784-cbsq8\" (UID: \"80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cbsq8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.183855 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/5f673e58-d96d-4194-8c5e-53be672a7996-plugins-dir\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.183871 4690 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqndt\" (UniqueName: \"kubernetes.io/projected/eeea8bdc-4b47-487e-bf46-1acbf11fc9bd-kube-api-access-xqndt\") pod \"service-ca-9c57cc56f-6wqbb\" (UID: \"eeea8bdc-4b47-487e-bf46-1acbf11fc9bd\") " pod="openshift-service-ca/service-ca-9c57cc56f-6wqbb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.183898 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cb8be15-fc6f-43f0-865e-96da3547155f-config\") pod \"kube-controller-manager-operator-78b949d7b-nmql7\" (UID: \"6cb8be15-fc6f-43f0-865e-96da3547155f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nmql7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.183914 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/080f2e20-451b-4825-89bd-2f35b98158b7-metrics-tls\") pod \"dns-default-t9jhb\" (UID: \"080f2e20-451b-4825-89bd-2f35b98158b7\") " pod="openshift-dns/dns-default-t9jhb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.183928 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3f373953-70e7-4d9b-a3a1-cc35e7255c44-config-volume\") pod \"collect-profiles-29424375-vhx57\" (UID: \"3f373953-70e7-4d9b-a3a1-cc35e7255c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.183977 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6f2s\" (UniqueName: \"kubernetes.io/projected/807380a2-3571-4626-b33a-71e79bdbf087-kube-api-access-p6f2s\") pod \"package-server-manager-789f6589d5-sgjk8\" (UID: \"807380a2-3571-4626-b33a-71e79bdbf087\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-sgjk8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.183993 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fv4k\" (UniqueName: \"kubernetes.io/projected/17ecb919-7205-4494-b314-db33c07f4bd4-kube-api-access-2fv4k\") pod \"machine-config-server-w2qz9\" (UID: \"17ecb919-7205-4494-b314-db33c07f4bd4\") " pod="openshift-machine-config-operator/machine-config-server-w2qz9" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184019 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wm2q7\" (UniqueName: \"kubernetes.io/projected/a3d8ec77-4a18-4938-970d-4d83f834a70e-kube-api-access-wm2q7\") pod \"multus-admission-controller-857f4d67dd-zsqv7\" (UID: \"a3d8ec77-4a18-4938-970d-4d83f834a70e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zsqv7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184038 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cc1eccc4-fb41-4aa2-8521-c6395edee227-images\") pod \"machine-config-operator-74547568cd-xp8q4\" (UID: \"cc1eccc4-fb41-4aa2-8521-c6395edee227\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184056 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/2c2d5543-2073-4952-b1b2-071e717d170f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-clbf8\" (UID: \"2c2d5543-2073-4952-b1b2-071e717d170f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-clbf8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184082 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-ca-trust-extracted\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184108 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dp74t\" (UniqueName: \"kubernetes.io/projected/5f673e58-d96d-4194-8c5e-53be672a7996-kube-api-access-dp74t\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184124 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bb4aacc0-eed5-4747-b97a-8fa71269177f-apiservice-cert\") pod \"packageserver-d55dfcdfc-mtc4n\" (UID: \"bb4aacc0-eed5-4747-b97a-8fa71269177f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184163 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-trusted-ca\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184179 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-installation-pull-secrets\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184196 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7xzc\" (UniqueName: \"kubernetes.io/projected/3f373953-70e7-4d9b-a3a1-cc35e7255c44-kube-api-access-b7xzc\") pod \"collect-profiles-29424375-vhx57\" (UID: \"3f373953-70e7-4d9b-a3a1-cc35e7255c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184222 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86crh\" (UniqueName: \"kubernetes.io/projected/0fd85a56-df39-46c5-82a5-808a37a3dc8e-kube-api-access-86crh\") pod \"dns-operator-744455d44c-z8tth\" (UID: \"0fd85a56-df39-46c5-82a5-808a37a3dc8e\") " pod="openshift-dns-operator/dns-operator-744455d44c-z8tth" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184262 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4x8n\" (UniqueName: 
\"kubernetes.io/projected/ef32fe73-cf65-4729-9f54-5cffddba74e1-kube-api-access-b4x8n\") pod \"ingress-operator-5b745b69d9-r5xkr\" (UID: \"ef32fe73-cf65-4729-9f54-5cffddba74e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184278 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lltql\" (UniqueName: \"kubernetes.io/projected/bb4aacc0-eed5-4747-b97a-8fa71269177f-kube-api-access-lltql\") pod \"packageserver-d55dfcdfc-mtc4n\" (UID: \"bb4aacc0-eed5-4747-b97a-8fa71269177f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184294 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-registry-certificates\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184309 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/17ecb919-7205-4494-b314-db33c07f4bd4-certs\") pod \"machine-config-server-w2qz9\" (UID: \"17ecb919-7205-4494-b314-db33c07f4bd4\") " pod="openshift-machine-config-operator/machine-config-server-w2qz9" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184350 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/eeea8bdc-4b47-487e-bf46-1acbf11fc9bd-signing-key\") pod \"service-ca-9c57cc56f-6wqbb\" (UID: \"eeea8bdc-4b47-487e-bf46-1acbf11fc9bd\") " pod="openshift-service-ca/service-ca-9c57cc56f-6wqbb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184376 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f3ec5c48-884c-4a5c-8651-24cd598a9ec2-cert\") pod \"ingress-canary-4b7r8\" (UID: \"f3ec5c48-884c-4a5c-8651-24cd598a9ec2\") " pod="openshift-ingress-canary/ingress-canary-4b7r8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184422 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/eeea8bdc-4b47-487e-bf46-1acbf11fc9bd-signing-cabundle\") pod \"service-ca-9c57cc56f-6wqbb\" (UID: \"eeea8bdc-4b47-487e-bf46-1acbf11fc9bd\") " pod="openshift-service-ca/service-ca-9c57cc56f-6wqbb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184458 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f62b5697-8059-4222-b731-2f8188e9a270-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5n4lj\" (UID: \"f62b5697-8059-4222-b731-2f8188e9a270\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5n4lj" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184480 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltfsg\" (UniqueName: \"kubernetes.io/projected/7b031965-e2b9-4d51-b598-9b8366944ee5-kube-api-access-ltfsg\") pod \"olm-operator-6b444d44fb-b4ml2\" (UID: \"7b031965-e2b9-4d51-b598-9b8366944ee5\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184527 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0fd85a56-df39-46c5-82a5-808a37a3dc8e-metrics-tls\") pod \"dns-operator-744455d44c-z8tth\" (UID: \"0fd85a56-df39-46c5-82a5-808a37a3dc8e\") " pod="openshift-dns-operator/dns-operator-744455d44c-z8tth" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184558 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c66d4\" (UniqueName: \"kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-kube-api-access-c66d4\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184579 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/17ecb919-7205-4494-b314-db33c07f4bd4-node-bootstrap-token\") pod \"machine-config-server-w2qz9\" (UID: \"17ecb919-7205-4494-b314-db33c07f4bd4\") " pod="openshift-machine-config-operator/machine-config-server-w2qz9" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184606 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00f7dbc0-5b6f-4f74-a4e4-43759758be95-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-r2jbn\" (UID: \"00f7dbc0-5b6f-4f74-a4e4-43759758be95\") " pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184623 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3f373953-70e7-4d9b-a3a1-cc35e7255c44-secret-volume\") pod \"collect-profiles-29424375-vhx57\" (UID: \"3f373953-70e7-4d9b-a3a1-cc35e7255c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184650 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-bound-sa-token\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.184667 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6cb8be15-fc6f-43f0-865e-96da3547155f-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-nmql7\" (UID: \"6cb8be15-fc6f-43f0-865e-96da3547155f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nmql7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.185190 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cc1eccc4-fb41-4aa2-8521-c6395edee227-auth-proxy-config\") pod \"machine-config-operator-74547568cd-xp8q4\" (UID: \"cc1eccc4-fb41-4aa2-8521-c6395edee227\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" Dec 11 
14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.185243 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/7731421f-ecdd-4ff8-aa63-79ae9983425f-ready\") pod \"cni-sysctl-allowlist-ds-vl4d4\" (UID: \"7731421f-ecdd-4ff8-aa63-79ae9983425f\") " pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.185304 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.185336 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/807380a2-3571-4626-b33a-71e79bdbf087-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-sgjk8\" (UID: \"807380a2-3571-4626-b33a-71e79bdbf087\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-sgjk8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.185588 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6cb8be15-fc6f-43f0-865e-96da3547155f-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-nmql7\" (UID: \"6cb8be15-fc6f-43f0-865e-96da3547155f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nmql7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.185636 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f62b5697-8059-4222-b731-2f8188e9a270-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5n4lj\" (UID: \"f62b5697-8059-4222-b731-2f8188e9a270\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5n4lj" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.185657 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bb4aacc0-eed5-4747-b97a-8fa71269177f-webhook-cert\") pod \"packageserver-d55dfcdfc-mtc4n\" (UID: \"bb4aacc0-eed5-4747-b97a-8fa71269177f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.185678 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/bb4aacc0-eed5-4747-b97a-8fa71269177f-tmpfs\") pod \"packageserver-d55dfcdfc-mtc4n\" (UID: \"bb4aacc0-eed5-4747-b97a-8fa71269177f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.185752 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a3d8ec77-4a18-4938-970d-4d83f834a70e-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-zsqv7\" (UID: \"a3d8ec77-4a18-4938-970d-4d83f834a70e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zsqv7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.185782 4690 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7b031965-e2b9-4d51-b598-9b8366944ee5-srv-cert\") pod \"olm-operator-6b444d44fb-b4ml2\" (UID: \"7b031965-e2b9-4d51-b598-9b8366944ee5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.185805 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ef32fe73-cf65-4729-9f54-5cffddba74e1-metrics-tls\") pod \"ingress-operator-5b745b69d9-r5xkr\" (UID: \"ef32fe73-cf65-4729-9f54-5cffddba74e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.185825 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18-serving-cert\") pod \"service-ca-operator-777779d784-cbsq8\" (UID: \"80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cbsq8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.185847 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlwd6\" (UniqueName: \"kubernetes.io/projected/80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18-kube-api-access-xlwd6\") pod \"service-ca-operator-777779d784-cbsq8\" (UID: \"80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cbsq8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.185936 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-registry-tls\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.185980 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7b031965-e2b9-4d51-b598-9b8366944ee5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-b4ml2\" (UID: \"7b031965-e2b9-4d51-b598-9b8366944ee5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.186005 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cc1eccc4-fb41-4aa2-8521-c6395edee227-proxy-tls\") pod \"machine-config-operator-74547568cd-xp8q4\" (UID: \"cc1eccc4-fb41-4aa2-8521-c6395edee227\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.188922 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwvwv\" (UniqueName: \"kubernetes.io/projected/cc1eccc4-fb41-4aa2-8521-c6395edee227-kube-api-access-rwvwv\") pod \"machine-config-operator-74547568cd-xp8q4\" (UID: \"cc1eccc4-fb41-4aa2-8521-c6395edee227\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.189004 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"socket-dir\" (UniqueName: \"kubernetes.io/host-path/5f673e58-d96d-4194-8c5e-53be672a7996-socket-dir\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.189073 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f62b5697-8059-4222-b731-2f8188e9a270-config\") pod \"kube-apiserver-operator-766d6c64bb-5n4lj\" (UID: \"f62b5697-8059-4222-b731-2f8188e9a270\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5n4lj" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.189124 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7731421f-ecdd-4ff8-aa63-79ae9983425f-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-vl4d4\" (UID: \"7731421f-ecdd-4ff8-aa63-79ae9983425f\") " pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.189176 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ef32fe73-cf65-4729-9f54-5cffddba74e1-trusted-ca\") pod \"ingress-operator-5b745b69d9-r5xkr\" (UID: \"ef32fe73-cf65-4729-9f54-5cffddba74e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.189199 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ef32fe73-cf65-4729-9f54-5cffddba74e1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-r5xkr\" (UID: \"ef32fe73-cf65-4729-9f54-5cffddba74e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.189224 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4psmc\" (UniqueName: \"kubernetes.io/projected/00f7dbc0-5b6f-4f74-a4e4-43759758be95-kube-api-access-4psmc\") pod \"marketplace-operator-79b997595-r2jbn\" (UID: \"00f7dbc0-5b6f-4f74-a4e4-43759758be95\") " pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.189721 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-registry-certificates\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.193624 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-ca-trust-extracted\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.198467 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-czjn5"] Dec 11 14:15:22 crc kubenswrapper[4690]: E1211 14:15:22.199303 4690 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:22.699272919 +0000 UTC m=+54.314674572 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.199519 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/5f673e58-d96d-4194-8c5e-53be672a7996-mountpoint-dir\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.199581 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g77vp\" (UniqueName: \"kubernetes.io/projected/080f2e20-451b-4825-89bd-2f35b98158b7-kube-api-access-g77vp\") pod \"dns-default-t9jhb\" (UID: \"080f2e20-451b-4825-89bd-2f35b98158b7\") " pod="openshift-dns/dns-default-t9jhb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.199635 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2j4cb\" (UniqueName: \"kubernetes.io/projected/f3ec5c48-884c-4a5c-8651-24cd598a9ec2-kube-api-access-2j4cb\") pod \"ingress-canary-4b7r8\" (UID: \"f3ec5c48-884c-4a5c-8651-24cd598a9ec2\") " pod="openshift-ingress-canary/ingress-canary-4b7r8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.199702 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6xj4\" (UniqueName: \"kubernetes.io/projected/2c2d5543-2073-4952-b1b2-071e717d170f-kube-api-access-t6xj4\") pod \"control-plane-machine-set-operator-78cbb6b69f-clbf8\" (UID: \"2c2d5543-2073-4952-b1b2-071e717d170f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-clbf8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.199739 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/5f673e58-d96d-4194-8c5e-53be672a7996-registration-dir\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.199761 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/5f673e58-d96d-4194-8c5e-53be672a7996-csi-data-dir\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.199787 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/00f7dbc0-5b6f-4f74-a4e4-43759758be95-marketplace-operator-metrics\") pod 
\"marketplace-operator-79b997595-r2jbn\" (UID: \"00f7dbc0-5b6f-4f74-a4e4-43759758be95\") " pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.202690 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f62b5697-8059-4222-b731-2f8188e9a270-config\") pod \"kube-apiserver-operator-766d6c64bb-5n4lj\" (UID: \"f62b5697-8059-4222-b731-2f8188e9a270\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5n4lj" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.204305 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ef32fe73-cf65-4729-9f54-5cffddba74e1-trusted-ca\") pod \"ingress-operator-5b745b69d9-r5xkr\" (UID: \"ef32fe73-cf65-4729-9f54-5cffddba74e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.204656 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-trusted-ca\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.205770 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-installation-pull-secrets\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.207091 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6cb8be15-fc6f-43f0-865e-96da3547155f-config\") pod \"kube-controller-manager-operator-78b949d7b-nmql7\" (UID: \"6cb8be15-fc6f-43f0-865e-96da3547155f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nmql7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.208225 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ef32fe73-cf65-4729-9f54-5cffddba74e1-metrics-tls\") pod \"ingress-operator-5b745b69d9-r5xkr\" (UID: \"ef32fe73-cf65-4729-9f54-5cffddba74e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.210529 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0fd85a56-df39-46c5-82a5-808a37a3dc8e-metrics-tls\") pod \"dns-operator-744455d44c-z8tth\" (UID: \"0fd85a56-df39-46c5-82a5-808a37a3dc8e\") " pod="openshift-dns-operator/dns-operator-744455d44c-z8tth" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.218342 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/a3d8ec77-4a18-4938-970d-4d83f834a70e-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-zsqv7\" (UID: \"a3d8ec77-4a18-4938-970d-4d83f834a70e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zsqv7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.222391 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-registry-tls\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.222589 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6cb8be15-fc6f-43f0-865e-96da3547155f-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-nmql7\" (UID: \"6cb8be15-fc6f-43f0-865e-96da3547155f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nmql7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.222830 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/7b031965-e2b9-4d51-b598-9b8366944ee5-profile-collector-cert\") pod \"olm-operator-6b444d44fb-b4ml2\" (UID: \"7b031965-e2b9-4d51-b598-9b8366944ee5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.222909 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/807380a2-3571-4626-b33a-71e79bdbf087-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-sgjk8\" (UID: \"807380a2-3571-4626-b33a-71e79bdbf087\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-sgjk8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.223095 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/7b031965-e2b9-4d51-b598-9b8366944ee5-srv-cert\") pod \"olm-operator-6b444d44fb-b4ml2\" (UID: \"7b031965-e2b9-4d51-b598-9b8366944ee5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.227205 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.227873 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86crh\" (UniqueName: \"kubernetes.io/projected/0fd85a56-df39-46c5-82a5-808a37a3dc8e-kube-api-access-86crh\") pod \"dns-operator-744455d44c-z8tth\" (UID: \"0fd85a56-df39-46c5-82a5-808a37a3dc8e\") " pod="openshift-dns-operator/dns-operator-744455d44c-z8tth" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.228165 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-8l25g"] Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.236572 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f62b5697-8059-4222-b731-2f8188e9a270-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5n4lj\" (UID: \"f62b5697-8059-4222-b731-2f8188e9a270\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5n4lj" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.240708 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c66d4\" (UniqueName: \"kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-kube-api-access-c66d4\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.247558 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" event={"ID":"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede","Type":"ContainerStarted","Data":"0f07ab1920486a137862ab19e3fe97deb495926dc5359de8f8d060461e8c93c6"} Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.255822 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" event={"ID":"5d3409f3-3730-479c-b48b-e3829fba88ae","Type":"ContainerStarted","Data":"6bbae4252e68e79c9afbefcce64d7d54e93478db36e805d5149ad35acb7d2a8d"} Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.255889 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" event={"ID":"5d3409f3-3730-479c-b48b-e3829fba88ae","Type":"ContainerStarted","Data":"b86206ff18872cc877ee2fefd617a8e82840a31d2cf6e105a75f3f1ed773cb81"} Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.257005 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.270156 4690 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-v47hs container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.270211 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" podUID="5d3409f3-3730-479c-b48b-e3829fba88ae" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" Dec 11 14:15:22 crc kubenswrapper[4690]: W1211 14:15:22.272097 
4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podac076d6f_30a7_4230_8320_936d709bf640.slice/crio-69813d71df028176b3a1005cee317693e80b089983514df0028ab18cd5a17d65 WatchSource:0}: Error finding container 69813d71df028176b3a1005cee317693e80b089983514df0028ab18cd5a17d65: Status 404 returned error can't find the container with id 69813d71df028176b3a1005cee317693e80b089983514df0028ab18cd5a17d65 Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.274664 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.289290 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/6cb8be15-fc6f-43f0-865e-96da3547155f-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-nmql7\" (UID: \"6cb8be15-fc6f-43f0-865e-96da3547155f\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nmql7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.296041 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-bound-sa-token\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.301938 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.302485 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cc1eccc4-fb41-4aa2-8521-c6395edee227-proxy-tls\") pod \"machine-config-operator-74547568cd-xp8q4\" (UID: \"cc1eccc4-fb41-4aa2-8521-c6395edee227\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.302533 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwvwv\" (UniqueName: \"kubernetes.io/projected/cc1eccc4-fb41-4aa2-8521-c6395edee227-kube-api-access-rwvwv\") pod \"machine-config-operator-74547568cd-xp8q4\" (UID: \"cc1eccc4-fb41-4aa2-8521-c6395edee227\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.302562 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/5f673e58-d96d-4194-8c5e-53be672a7996-socket-dir\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.302600 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7731421f-ecdd-4ff8-aa63-79ae9983425f-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-vl4d4\" (UID: \"7731421f-ecdd-4ff8-aa63-79ae9983425f\") " 
pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.302636 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4psmc\" (UniqueName: \"kubernetes.io/projected/00f7dbc0-5b6f-4f74-a4e4-43759758be95-kube-api-access-4psmc\") pod \"marketplace-operator-79b997595-r2jbn\" (UID: \"00f7dbc0-5b6f-4f74-a4e4-43759758be95\") " pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.302674 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/5f673e58-d96d-4194-8c5e-53be672a7996-mountpoint-dir\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.302704 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g77vp\" (UniqueName: \"kubernetes.io/projected/080f2e20-451b-4825-89bd-2f35b98158b7-kube-api-access-g77vp\") pod \"dns-default-t9jhb\" (UID: \"080f2e20-451b-4825-89bd-2f35b98158b7\") " pod="openshift-dns/dns-default-t9jhb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.302728 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2j4cb\" (UniqueName: \"kubernetes.io/projected/f3ec5c48-884c-4a5c-8651-24cd598a9ec2-kube-api-access-2j4cb\") pod \"ingress-canary-4b7r8\" (UID: \"f3ec5c48-884c-4a5c-8651-24cd598a9ec2\") " pod="openshift-ingress-canary/ingress-canary-4b7r8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.302751 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6xj4\" (UniqueName: \"kubernetes.io/projected/2c2d5543-2073-4952-b1b2-071e717d170f-kube-api-access-t6xj4\") pod \"control-plane-machine-set-operator-78cbb6b69f-clbf8\" (UID: \"2c2d5543-2073-4952-b1b2-071e717d170f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-clbf8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.302771 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/5f673e58-d96d-4194-8c5e-53be672a7996-registration-dir\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.302792 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/5f673e58-d96d-4194-8c5e-53be672a7996-csi-data-dir\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.302810 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/00f7dbc0-5b6f-4f74-a4e4-43759758be95-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-r2jbn\" (UID: \"00f7dbc0-5b6f-4f74-a4e4-43759758be95\") " pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.302830 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/080f2e20-451b-4825-89bd-2f35b98158b7-config-volume\") pod \"dns-default-t9jhb\" (UID: \"080f2e20-451b-4825-89bd-2f35b98158b7\") " pod="openshift-dns/dns-default-t9jhb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.302857 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86vrb\" (UniqueName: \"kubernetes.io/projected/7731421f-ecdd-4ff8-aa63-79ae9983425f-kube-api-access-86vrb\") pod \"cni-sysctl-allowlist-ds-vl4d4\" (UID: \"7731421f-ecdd-4ff8-aa63-79ae9983425f\") " pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.302877 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7731421f-ecdd-4ff8-aa63-79ae9983425f-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-vl4d4\" (UID: \"7731421f-ecdd-4ff8-aa63-79ae9983425f\") " pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.304727 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/5f673e58-d96d-4194-8c5e-53be672a7996-socket-dir\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.304738 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/5f673e58-d96d-4194-8c5e-53be672a7996-csi-data-dir\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.304668 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/5f673e58-d96d-4194-8c5e-53be672a7996-mountpoint-dir\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.304774 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18-config\") pod \"service-ca-operator-777779d784-cbsq8\" (UID: \"80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cbsq8" Dec 11 14:15:22 crc kubenswrapper[4690]: E1211 14:15:22.304842 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:22.804824139 +0000 UTC m=+54.420225782 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.304870 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/5f673e58-d96d-4194-8c5e-53be672a7996-plugins-dir\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.304899 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqndt\" (UniqueName: \"kubernetes.io/projected/eeea8bdc-4b47-487e-bf46-1acbf11fc9bd-kube-api-access-xqndt\") pod \"service-ca-9c57cc56f-6wqbb\" (UID: \"eeea8bdc-4b47-487e-bf46-1acbf11fc9bd\") " pod="openshift-service-ca/service-ca-9c57cc56f-6wqbb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.304966 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/080f2e20-451b-4825-89bd-2f35b98158b7-metrics-tls\") pod \"dns-default-t9jhb\" (UID: \"080f2e20-451b-4825-89bd-2f35b98158b7\") " pod="openshift-dns/dns-default-t9jhb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.304990 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3f373953-70e7-4d9b-a3a1-cc35e7255c44-config-volume\") pod \"collect-profiles-29424375-vhx57\" (UID: \"3f373953-70e7-4d9b-a3a1-cc35e7255c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.304995 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/5f673e58-d96d-4194-8c5e-53be672a7996-registration-dir\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305034 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fv4k\" (UniqueName: \"kubernetes.io/projected/17ecb919-7205-4494-b314-db33c07f4bd4-kube-api-access-2fv4k\") pod \"machine-config-server-w2qz9\" (UID: \"17ecb919-7205-4494-b314-db33c07f4bd4\") " pod="openshift-machine-config-operator/machine-config-server-w2qz9" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305060 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/5f673e58-d96d-4194-8c5e-53be672a7996-plugins-dir\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305066 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cc1eccc4-fb41-4aa2-8521-c6395edee227-images\") pod \"machine-config-operator-74547568cd-xp8q4\" (UID: 
\"cc1eccc4-fb41-4aa2-8521-c6395edee227\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305117 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/2c2d5543-2073-4952-b1b2-071e717d170f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-clbf8\" (UID: \"2c2d5543-2073-4952-b1b2-071e717d170f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-clbf8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305153 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dp74t\" (UniqueName: \"kubernetes.io/projected/5f673e58-d96d-4194-8c5e-53be672a7996-kube-api-access-dp74t\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305185 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bb4aacc0-eed5-4747-b97a-8fa71269177f-apiservice-cert\") pod \"packageserver-d55dfcdfc-mtc4n\" (UID: \"bb4aacc0-eed5-4747-b97a-8fa71269177f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305216 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7xzc\" (UniqueName: \"kubernetes.io/projected/3f373953-70e7-4d9b-a3a1-cc35e7255c44-kube-api-access-b7xzc\") pod \"collect-profiles-29424375-vhx57\" (UID: \"3f373953-70e7-4d9b-a3a1-cc35e7255c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305268 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lltql\" (UniqueName: \"kubernetes.io/projected/bb4aacc0-eed5-4747-b97a-8fa71269177f-kube-api-access-lltql\") pod \"packageserver-d55dfcdfc-mtc4n\" (UID: \"bb4aacc0-eed5-4747-b97a-8fa71269177f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305298 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/17ecb919-7205-4494-b314-db33c07f4bd4-certs\") pod \"machine-config-server-w2qz9\" (UID: \"17ecb919-7205-4494-b314-db33c07f4bd4\") " pod="openshift-machine-config-operator/machine-config-server-w2qz9" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305326 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/eeea8bdc-4b47-487e-bf46-1acbf11fc9bd-signing-key\") pod \"service-ca-9c57cc56f-6wqbb\" (UID: \"eeea8bdc-4b47-487e-bf46-1acbf11fc9bd\") " pod="openshift-service-ca/service-ca-9c57cc56f-6wqbb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305354 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f3ec5c48-884c-4a5c-8651-24cd598a9ec2-cert\") pod \"ingress-canary-4b7r8\" (UID: \"f3ec5c48-884c-4a5c-8651-24cd598a9ec2\") " pod="openshift-ingress-canary/ingress-canary-4b7r8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305394 4690 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/eeea8bdc-4b47-487e-bf46-1acbf11fc9bd-signing-cabundle\") pod \"service-ca-9c57cc56f-6wqbb\" (UID: \"eeea8bdc-4b47-487e-bf46-1acbf11fc9bd\") " pod="openshift-service-ca/service-ca-9c57cc56f-6wqbb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305454 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/17ecb919-7205-4494-b314-db33c07f4bd4-node-bootstrap-token\") pod \"machine-config-server-w2qz9\" (UID: \"17ecb919-7205-4494-b314-db33c07f4bd4\") " pod="openshift-machine-config-operator/machine-config-server-w2qz9" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305487 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00f7dbc0-5b6f-4f74-a4e4-43759758be95-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-r2jbn\" (UID: \"00f7dbc0-5b6f-4f74-a4e4-43759758be95\") " pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305516 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3f373953-70e7-4d9b-a3a1-cc35e7255c44-secret-volume\") pod \"collect-profiles-29424375-vhx57\" (UID: \"3f373953-70e7-4d9b-a3a1-cc35e7255c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305564 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cc1eccc4-fb41-4aa2-8521-c6395edee227-auth-proxy-config\") pod \"machine-config-operator-74547568cd-xp8q4\" (UID: \"cc1eccc4-fb41-4aa2-8521-c6395edee227\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305592 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/7731421f-ecdd-4ff8-aa63-79ae9983425f-ready\") pod \"cni-sysctl-allowlist-ds-vl4d4\" (UID: \"7731421f-ecdd-4ff8-aa63-79ae9983425f\") " pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305636 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305682 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bb4aacc0-eed5-4747-b97a-8fa71269177f-webhook-cert\") pod \"packageserver-d55dfcdfc-mtc4n\" (UID: \"bb4aacc0-eed5-4747-b97a-8fa71269177f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305713 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/bb4aacc0-eed5-4747-b97a-8fa71269177f-tmpfs\") pod \"packageserver-d55dfcdfc-mtc4n\" (UID: 
\"bb4aacc0-eed5-4747-b97a-8fa71269177f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305747 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18-serving-cert\") pod \"service-ca-operator-777779d784-cbsq8\" (UID: \"80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cbsq8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.305831 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cc1eccc4-fb41-4aa2-8521-c6395edee227-images\") pod \"machine-config-operator-74547568cd-xp8q4\" (UID: \"cc1eccc4-fb41-4aa2-8521-c6395edee227\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.306472 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-z8tth" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.307311 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3f373953-70e7-4d9b-a3a1-cc35e7255c44-config-volume\") pod \"collect-profiles-29424375-vhx57\" (UID: \"3f373953-70e7-4d9b-a3a1-cc35e7255c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.309620 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/080f2e20-451b-4825-89bd-2f35b98158b7-metrics-tls\") pod \"dns-default-t9jhb\" (UID: \"080f2e20-451b-4825-89bd-2f35b98158b7\") " pod="openshift-dns/dns-default-t9jhb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.311250 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/00f7dbc0-5b6f-4f74-a4e4-43759758be95-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-r2jbn\" (UID: \"00f7dbc0-5b6f-4f74-a4e4-43759758be95\") " pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.311822 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/080f2e20-451b-4825-89bd-2f35b98158b7-config-volume\") pod \"dns-default-t9jhb\" (UID: \"080f2e20-451b-4825-89bd-2f35b98158b7\") " pod="openshift-dns/dns-default-t9jhb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.311848 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7731421f-ecdd-4ff8-aa63-79ae9983425f-cni-sysctl-allowlist\") pod \"cni-sysctl-allowlist-ds-vl4d4\" (UID: \"7731421f-ecdd-4ff8-aa63-79ae9983425f\") " pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.303598 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4lfzc" event={"ID":"08650951-b99f-42e0-8321-9b1b9560f1cb","Type":"ContainerStarted","Data":"f0b7c079b54f99a5844fa5f303163643fbb62036b36b27a9cdd91b79b08b0b93"} Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.311913 
4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4lfzc" event={"ID":"08650951-b99f-42e0-8321-9b1b9560f1cb","Type":"ContainerStarted","Data":"4196506cf8b496432f4a826ee4111bf7d8e77e1602f2b27e0f81267effb959e2"} Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.304542 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7731421f-ecdd-4ff8-aa63-79ae9983425f-tuning-conf-dir\") pod \"cni-sysctl-allowlist-ds-vl4d4\" (UID: \"7731421f-ecdd-4ff8-aa63-79ae9983425f\") " pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.313999 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-crxzj"] Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.314335 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/7731421f-ecdd-4ff8-aa63-79ae9983425f-ready\") pod \"cni-sysctl-allowlist-ds-vl4d4\" (UID: \"7731421f-ecdd-4ff8-aa63-79ae9983425f\") " pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.315780 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlwd6\" (UniqueName: \"kubernetes.io/projected/80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18-kube-api-access-xlwd6\") pod \"service-ca-operator-777779d784-cbsq8\" (UID: \"80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cbsq8" Dec 11 14:15:22 crc kubenswrapper[4690]: E1211 14:15:22.316092 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:22.816065283 +0000 UTC m=+54.431466916 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.316144 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/17ecb919-7205-4494-b314-db33c07f4bd4-node-bootstrap-token\") pod \"machine-config-server-w2qz9\" (UID: \"17ecb919-7205-4494-b314-db33c07f4bd4\") " pod="openshift-machine-config-operator/machine-config-server-w2qz9" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.316499 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/cc1eccc4-fb41-4aa2-8521-c6395edee227-auth-proxy-config\") pod \"machine-config-operator-74547568cd-xp8q4\" (UID: \"cc1eccc4-fb41-4aa2-8521-c6395edee227\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.316999 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/eeea8bdc-4b47-487e-bf46-1acbf11fc9bd-signing-cabundle\") pod \"service-ca-9c57cc56f-6wqbb\" (UID: \"eeea8bdc-4b47-487e-bf46-1acbf11fc9bd\") " pod="openshift-service-ca/service-ca-9c57cc56f-6wqbb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.317359 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/bb4aacc0-eed5-4747-b97a-8fa71269177f-tmpfs\") pod \"packageserver-d55dfcdfc-mtc4n\" (UID: \"bb4aacc0-eed5-4747-b97a-8fa71269177f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.318111 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00f7dbc0-5b6f-4f74-a4e4-43759758be95-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-r2jbn\" (UID: \"00f7dbc0-5b6f-4f74-a4e4-43759758be95\") " pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.318445 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nmql7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.319747 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18-config\") pod \"service-ca-operator-777779d784-cbsq8\" (UID: \"80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cbsq8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.322859 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3f373953-70e7-4d9b-a3a1-cc35e7255c44-secret-volume\") pod \"collect-profiles-29424375-vhx57\" (UID: \"3f373953-70e7-4d9b-a3a1-cc35e7255c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.327080 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6f2s\" (UniqueName: \"kubernetes.io/projected/807380a2-3571-4626-b33a-71e79bdbf087-kube-api-access-p6f2s\") pod \"package-server-manager-789f6589d5-sgjk8\" (UID: \"807380a2-3571-4626-b33a-71e79bdbf087\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-sgjk8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.327143 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wm2q7\" (UniqueName: \"kubernetes.io/projected/a3d8ec77-4a18-4938-970d-4d83f834a70e-kube-api-access-wm2q7\") pod \"multus-admission-controller-857f4d67dd-zsqv7\" (UID: \"a3d8ec77-4a18-4938-970d-4d83f834a70e\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zsqv7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.327218 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/bb4aacc0-eed5-4747-b97a-8fa71269177f-apiservice-cert\") pod \"packageserver-d55dfcdfc-mtc4n\" (UID: \"bb4aacc0-eed5-4747-b97a-8fa71269177f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.328456 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/cc1eccc4-fb41-4aa2-8521-c6395edee227-proxy-tls\") pod \"machine-config-operator-74547568cd-xp8q4\" (UID: \"cc1eccc4-fb41-4aa2-8521-c6395edee227\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.328754 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/eeea8bdc-4b47-487e-bf46-1acbf11fc9bd-signing-key\") pod \"service-ca-9c57cc56f-6wqbb\" (UID: \"eeea8bdc-4b47-487e-bf46-1acbf11fc9bd\") " pod="openshift-service-ca/service-ca-9c57cc56f-6wqbb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.328918 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/17ecb919-7205-4494-b314-db33c07f4bd4-certs\") pod \"machine-config-server-w2qz9\" (UID: \"17ecb919-7205-4494-b314-db33c07f4bd4\") " pod="openshift-machine-config-operator/machine-config-server-w2qz9" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.330693 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/f3ec5c48-884c-4a5c-8651-24cd598a9ec2-cert\") pod \"ingress-canary-4b7r8\" (UID: \"f3ec5c48-884c-4a5c-8651-24cd598a9ec2\") " pod="openshift-ingress-canary/ingress-canary-4b7r8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.355898 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18-serving-cert\") pod \"service-ca-operator-777779d784-cbsq8\" (UID: \"80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cbsq8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.356148 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/bb4aacc0-eed5-4747-b97a-8fa71269177f-webhook-cert\") pod \"packageserver-d55dfcdfc-mtc4n\" (UID: \"bb4aacc0-eed5-4747-b97a-8fa71269177f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.356493 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-jzm89" event={"ID":"9211752e-9c0b-43ea-9c4d-5d91fcb472db","Type":"ContainerStarted","Data":"503ef073dcd5dfff77f82f1de76a17d75ba77a4647e1f7f4dab385f8a615e9c9"} Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.363687 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-jzm89" event={"ID":"9211752e-9c0b-43ea-9c4d-5d91fcb472db","Type":"ContainerStarted","Data":"c686bc91661b00bbc221af3bb386ce733b3ee376e738ebfbf69427063992de3c"} Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.356932 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f62b5697-8059-4222-b731-2f8188e9a270-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5n4lj\" (UID: \"f62b5697-8059-4222-b731-2f8188e9a270\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5n4lj" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.363504 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltfsg\" (UniqueName: \"kubernetes.io/projected/7b031965-e2b9-4d51-b598-9b8366944ee5-kube-api-access-ltfsg\") pod \"olm-operator-6b444d44fb-b4ml2\" (UID: \"7b031965-e2b9-4d51-b598-9b8366944ee5\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.356797 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/2c2d5543-2073-4952-b1b2-071e717d170f-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-clbf8\" (UID: \"2c2d5543-2073-4952-b1b2-071e717d170f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-clbf8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.366027 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-rb4j9" event={"ID":"fbdc74c8-8b96-479b-b06c-637acb1bb68a","Type":"ContainerStarted","Data":"4220098c9b2c9a39a590b1e4e29a31c3f76e7ab7426716b1499c22aa4822dd1c"} Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.372037 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-8klv5" 
event={"ID":"4ac34e73-25e1-449d-9c43-e6bc5054ede8","Type":"ContainerStarted","Data":"8c6c45d6286a02169c555061e931b1d5c82a9dc372d5f790a244aaca98c96d34"} Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.372104 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-8klv5" event={"ID":"4ac34e73-25e1-449d-9c43-e6bc5054ede8","Type":"ContainerStarted","Data":"4700bb0442ee7c2f28aa4fa496838e4a2b904708775b157da9304bee2b9b15eb"} Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.377440 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-8klv5" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.384502 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4x8n\" (UniqueName: \"kubernetes.io/projected/ef32fe73-cf65-4729-9f54-5cffddba74e1-kube-api-access-b4x8n\") pod \"ingress-operator-5b745b69d9-r5xkr\" (UID: \"ef32fe73-cf65-4729-9f54-5cffddba74e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.389542 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9wx9g"] Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.398392 4690 patch_prober.go:28] interesting pod/downloads-7954f5f757-8klv5 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.398464 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8klv5" podUID="4ac34e73-25e1-449d-9c43-e6bc5054ede8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.408228 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" event={"ID":"3455abcd-3a0d-4376-908c-81484c62002a","Type":"ContainerStarted","Data":"c33d6452fbe2c5c9fef6c7593c9b5df060fef6103f73a787dfa0c25eb169d5db"} Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.408275 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" event={"ID":"3455abcd-3a0d-4376-908c-81484c62002a","Type":"ContainerStarted","Data":"24261bf76af5594e107cf452464c4be243aa69c1333498732998cd6da2ccfef4"} Dec 11 14:15:22 crc kubenswrapper[4690]: W1211 14:15:22.415665 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f1e1cca_4214_4752_8bc4_1972df6de928.slice/crio-a4c6f664eff4117dec23224c8acc2bbca422f687bce3c28aea88c7d0785036cc WatchSource:0}: Error finding container a4c6f664eff4117dec23224c8acc2bbca422f687bce3c28aea88c7d0785036cc: Status 404 returned error can't find the container with id a4c6f664eff4117dec23224c8acc2bbca422f687bce3c28aea88c7d0785036cc Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.416521 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:22 crc kubenswrapper[4690]: E1211 14:15:22.416681 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:22.916649918 +0000 UTC m=+54.532051571 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.416897 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: E1211 14:15:22.420091 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:22.920072925 +0000 UTC m=+54.535474568 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.420410 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ef32fe73-cf65-4729-9f54-5cffddba74e1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-r5xkr\" (UID: \"ef32fe73-cf65-4729-9f54-5cffddba74e1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.423537 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.436810 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-zsqv7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.442494 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g77vp\" (UniqueName: \"kubernetes.io/projected/080f2e20-451b-4825-89bd-2f35b98158b7-kube-api-access-g77vp\") pod \"dns-default-t9jhb\" (UID: \"080f2e20-451b-4825-89bd-2f35b98158b7\") " pod="openshift-dns/dns-default-t9jhb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.445848 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-sgjk8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.452836 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc"] Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.467987 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4psmc\" (UniqueName: \"kubernetes.io/projected/00f7dbc0-5b6f-4f74-a4e4-43759758be95-kube-api-access-4psmc\") pod \"marketplace-operator-79b997595-r2jbn\" (UID: \"00f7dbc0-5b6f-4f74-a4e4-43759758be95\") " pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.491451 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2j4cb\" (UniqueName: \"kubernetes.io/projected/f3ec5c48-884c-4a5c-8651-24cd598a9ec2-kube-api-access-2j4cb\") pod \"ingress-canary-4b7r8\" (UID: \"f3ec5c48-884c-4a5c-8651-24cd598a9ec2\") " pod="openshift-ingress-canary/ingress-canary-4b7r8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.497584 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6"] Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.506284 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6xj4\" (UniqueName: \"kubernetes.io/projected/2c2d5543-2073-4952-b1b2-071e717d170f-kube-api-access-t6xj4\") pod \"control-plane-machine-set-operator-78cbb6b69f-clbf8\" (UID: \"2c2d5543-2073-4952-b1b2-071e717d170f\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-clbf8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.516506 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-clbf8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.518493 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:22 crc kubenswrapper[4690]: E1211 14:15:22.521308 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:23.021280585 +0000 UTC m=+54.636682268 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.526528 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqndt\" (UniqueName: \"kubernetes.io/projected/eeea8bdc-4b47-487e-bf46-1acbf11fc9bd-kube-api-access-xqndt\") pod \"service-ca-9c57cc56f-6wqbb\" (UID: \"eeea8bdc-4b47-487e-bf46-1acbf11fc9bd\") " pod="openshift-service-ca/service-ca-9c57cc56f-6wqbb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.544697 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb"] Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.547892 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-t9jhb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.557371 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fv4k\" (UniqueName: \"kubernetes.io/projected/17ecb919-7205-4494-b314-db33c07f4bd4-kube-api-access-2fv4k\") pod \"machine-config-server-w2qz9\" (UID: \"17ecb919-7205-4494-b314-db33c07f4bd4\") " pod="openshift-machine-config-operator/machine-config-server-w2qz9" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.557390 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-4b7r8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.563631 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86vrb\" (UniqueName: \"kubernetes.io/projected/7731421f-ecdd-4ff8-aa63-79ae9983425f-kube-api-access-86vrb\") pod \"cni-sysctl-allowlist-ds-vl4d4\" (UID: \"7731421f-ecdd-4ff8-aa63-79ae9983425f\") " pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.570448 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.570966 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5n4lj" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.581990 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zl7lm"] Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.582233 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-w2qz9" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.586184 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-6mnd7"] Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.595544 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.597780 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bfp74"] Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.601541 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwvwv\" (UniqueName: \"kubernetes.io/projected/cc1eccc4-fb41-4aa2-8521-c6395edee227-kube-api-access-rwvwv\") pod \"machine-config-operator-74547568cd-xp8q4\" (UID: \"cc1eccc4-fb41-4aa2-8521-c6395edee227\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.607717 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dp74t\" (UniqueName: \"kubernetes.io/projected/5f673e58-d96d-4194-8c5e-53be672a7996-kube-api-access-dp74t\") pod \"csi-hostpathplugin-ktwn7\" (UID: \"5f673e58-d96d-4194-8c5e-53be672a7996\") " pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.615480 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlwd6\" (UniqueName: \"kubernetes.io/projected/80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18-kube-api-access-xlwd6\") pod \"service-ca-operator-777779d784-cbsq8\" (UID: \"80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-cbsq8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.626918 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: E1211 14:15:22.627715 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:23.127702408 +0000 UTC m=+54.743104041 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.647644 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7xzc\" (UniqueName: \"kubernetes.io/projected/3f373953-70e7-4d9b-a3a1-cc35e7255c44-kube-api-access-b7xzc\") pod \"collect-profiles-29424375-vhx57\" (UID: \"3f373953-70e7-4d9b-a3a1-cc35e7255c44\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.657633 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lltql\" (UniqueName: \"kubernetes.io/projected/bb4aacc0-eed5-4747-b97a-8fa71269177f-kube-api-access-lltql\") pod \"packageserver-d55dfcdfc-mtc4n\" (UID: \"bb4aacc0-eed5-4747-b97a-8fa71269177f\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.705235 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9h2v4"] Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.726177 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.727594 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:22 crc kubenswrapper[4690]: E1211 14:15:22.727987 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:23.227967414 +0000 UTC m=+54.843369067 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.730496 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: E1211 14:15:22.732689 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:23.231156965 +0000 UTC m=+54.846558608 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.750360 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:22 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:22 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:22 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.750425 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.754189 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.772401 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.772971 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-6wqbb" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.786683 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.789703 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-q2v54"] Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.792425 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cbsq8" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.803417 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.834884 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:22 crc kubenswrapper[4690]: E1211 14:15:22.835291 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:23.335273079 +0000 UTC m=+54.950674722 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.837736 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.936837 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:22 crc kubenswrapper[4690]: E1211 14:15:22.937326 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:23.43731023 +0000 UTC m=+55.052711873 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.986780 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w"] Dec 11 14:15:22 crc kubenswrapper[4690]: I1211 14:15:22.988062 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4gxkn"] Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.038339 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:23 crc kubenswrapper[4690]: E1211 14:15:23.039193 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:23.539167387 +0000 UTC m=+55.154569030 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.123557 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-77vtz"] Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.141708 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-r56mp"] Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.144233 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:23 crc kubenswrapper[4690]: E1211 14:15:23.144702 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:23.644685767 +0000 UTC m=+55.260087410 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.215635 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-z8tth"] Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.245803 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:23 crc kubenswrapper[4690]: E1211 14:15:23.246187 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:23.746169334 +0000 UTC m=+55.361570977 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.273347 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nmql7"] Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.313476 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-jzm89" podStartSLOduration=19.313456996 podStartE2EDuration="19.313456996s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:23.312985694 +0000 UTC m=+54.928387357" watchObservedRunningTime="2025-12-11 14:15:23.313456996 +0000 UTC m=+54.928858639" Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.346767 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:23 crc kubenswrapper[4690]: E1211 14:15:23.347062 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:23.847048896 +0000 UTC m=+55.462450539 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.447877 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:23 crc kubenswrapper[4690]: E1211 14:15:23.448243 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:23.948222496 +0000 UTC m=+55.563624139 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.448459 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:23 crc kubenswrapper[4690]: E1211 14:15:23.448940 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:23.948924724 +0000 UTC m=+55.564326367 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.462756 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-w2qz9" event={"ID":"17ecb919-7205-4494-b314-db33c07f4bd4","Type":"ContainerStarted","Data":"3f798dda387ad4c3badcd8cc162cf21ca7e17bc4803d14fc3b5d9f2b95893fac"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.491700 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w" event={"ID":"6568e808-b0c8-4d3a-b454-51b9c810f2a3","Type":"ContainerStarted","Data":"05938fd0842e00c9a1887f34b3d4f6a655f4d8d990bd9ff44fbb83fdc98651ce"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.499938 4690 generic.go:334] "Generic (PLEG): container finished" podID="ff1e4a4f-2591-46a6-b478-7cef472306c3" containerID="06c89ec9b33e4b3bad6f93c7b11ba4008c35a64dd67f5cea92f141589606f181" exitCode=0 Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.500081 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8l25g" event={"ID":"ff1e4a4f-2591-46a6-b478-7cef472306c3","Type":"ContainerDied","Data":"06c89ec9b33e4b3bad6f93c7b11ba4008c35a64dd67f5cea92f141589606f181"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.500128 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8l25g" event={"ID":"ff1e4a4f-2591-46a6-b478-7cef472306c3","Type":"ContainerStarted","Data":"b15a0479e4db738590fd7397b103cfc27590f52a35d603eb1678111d9a13e5d2"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.510312 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" event={"ID":"7731421f-ecdd-4ff8-aa63-79ae9983425f","Type":"ContainerStarted","Data":"2a89d8f5c632a1757547335790301c5df242adb30d33d9da36742a86e78b3af6"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.527159 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2"] Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.549480 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:23 crc kubenswrapper[4690]: E1211 14:15:23.549890 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:24.049870888 +0000 UTC m=+55.665272531 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.557618 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-zsqv7"] Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.564150 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-8klv5" podStartSLOduration=19.564123788 podStartE2EDuration="19.564123788s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:23.549328774 +0000 UTC m=+55.164730417" watchObservedRunningTime="2025-12-11 14:15:23.564123788 +0000 UTC m=+55.179525431" Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.569734 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-r56mp" event={"ID":"6ddc4574-895b-41fa-a8c9-47c4f303d0d9","Type":"ContainerStarted","Data":"72ba5608fae06b1665db622386eb93503abbf7046198b1bf9dff10552d618765"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.575516 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-t9jhb"] Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.587119 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" event={"ID":"787bb691-ec3b-4dad-868a-3dcd2c33f4e1","Type":"ContainerStarted","Data":"ae4da0eeb6e663d73dd702ff84b1dd0bd68d1be389a53ad4aea88131bfdf70d5"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.590402 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zl7lm" event={"ID":"f5c5595c-6c47-401a-8067-a02411ef722b","Type":"ContainerStarted","Data":"a7a3629efd8a86c121c5883ef2dcffdadeed58a544934b23d3ce3f4ac78abe02"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.603643 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-rb4j9" event={"ID":"fbdc74c8-8b96-479b-b06c-637acb1bb68a","Type":"ContainerStarted","Data":"6d7c887a5d687afbffcce8f965d989d29df796427100d75a6cc1440c34617908"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.607161 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" event={"ID":"7f1e1cca-4214-4752-8bc4-1972df6de928","Type":"ContainerStarted","Data":"a4c6f664eff4117dec23224c8acc2bbca422f687bce3c28aea88c7d0785036cc"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.608498 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9h2v4" event={"ID":"57d5b2c5-8c4a-410b-9acf-94852865a8d6","Type":"ContainerStarted","Data":"03d2d23a0fe0bafa623fee76b1756a5cc12890509b41eda57f1a06bc1d9bd2bd"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.611382 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" event={"ID":"2e221469-9c10-4c08-925b-a4e4e4c3d208","Type":"ContainerStarted","Data":"9187434550edb9bebc3f9c98a2c856ba23dc2d496eeb57b1838eb0afd616b96e"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.612515 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-z8tth" event={"ID":"0fd85a56-df39-46c5-82a5-808a37a3dc8e","Type":"ContainerStarted","Data":"7b52075fe8c5433f9b2c0a5c14c04c8b1704e24e561496904e05e471bf73376f"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.626162 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bfp74" event={"ID":"3414e867-9935-42f6-9de9-853252ee06d3","Type":"ContainerStarted","Data":"75441718ac9aba0dfad7b4f3c08422e634dce1291584d22e6b3b558dd63aac85"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.627880 4690 generic.go:334] "Generic (PLEG): container finished" podID="7406c3ff-2d22-41c7-ae0b-fa8180cc5ede" containerID="2b95e38f759c143ba19a6ca70c8928f6d70b902f54ffc5850f3f94f13c783db8" exitCode=0 Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.628813 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" event={"ID":"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede","Type":"ContainerDied","Data":"2b95e38f759c143ba19a6ca70c8928f6d70b902f54ffc5850f3f94f13c783db8"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.632824 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" event={"ID":"4ecd7785-d164-4998-a361-031cd179f164","Type":"ContainerStarted","Data":"8f47e72b1fbcb868baddf956c7cba41b944badc9fba3d74da5a2654b5ed99803"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.634102 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mnd7" event={"ID":"f0fc8c25-f7b2-4f7b-8ea7-323e91557a24","Type":"ContainerStarted","Data":"8e802be58833b8c60012a3551ae72c38e50ba06aa420e99e3c1c790e112f0c1b"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.636309 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" event={"ID":"ac076d6f-30a7-4230-8320-936d709bf640","Type":"ContainerStarted","Data":"b5cd68ab420f08a46487c9c6b5c2446c1ea9942d36376b2bbda1561a27fc8fc1"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.636364 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" event={"ID":"ac076d6f-30a7-4230-8320-936d709bf640","Type":"ContainerStarted","Data":"69813d71df028176b3a1005cee317693e80b089983514df0028ab18cd5a17d65"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.641350 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6" event={"ID":"3c1f84bf-fc2e-465a-b3e1-436eb29e50ab","Type":"ContainerStarted","Data":"3bdfd6bb78e979f64219e8f43edd5723ef2dfb207b52b7e1105d723fafa8e0f5"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.645340 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" 
event={"ID":"3455abcd-3a0d-4376-908c-81484c62002a","Type":"ContainerStarted","Data":"47af5ce3da4e1ce2c841fab24329e1822a3ec1a560a2f5b3f2924291894b077f"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.651836 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:23 crc kubenswrapper[4690]: E1211 14:15:23.652412 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:24.152395601 +0000 UTC m=+55.767797244 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.653643 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nmql7" event={"ID":"6cb8be15-fc6f-43f0-865e-96da3547155f","Type":"ContainerStarted","Data":"2b68e3ede2774d334fc820413b4489add2e755191d2e33203447bcfa86c326ad"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.655261 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb" event={"ID":"24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab","Type":"ContainerStarted","Data":"c129ea081dc2c312d820fd5469f7e6a9bd424a426aedf60890c32f054615941e"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.657577 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-q2v54" event={"ID":"9dbcf64e-599d-458f-a5cd-92f58deaa813","Type":"ContainerStarted","Data":"fc76d35b49c4a3d57f57f8e198b6de7a58861227d7c0c9fe725425916283d067"} Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.660105 4690 patch_prober.go:28] interesting pod/downloads-7954f5f757-8klv5 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.660170 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8klv5" podUID="4ac34e73-25e1-449d-9c43-e6bc5054ede8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.669404 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.753919 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:23 crc kubenswrapper[4690]: E1211 14:15:23.755006 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:24.254989047 +0000 UTC m=+55.870390690 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.791016 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" podStartSLOduration=19.790996288 podStartE2EDuration="19.790996288s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:23.758842184 +0000 UTC m=+55.374243837" watchObservedRunningTime="2025-12-11 14:15:23.790996288 +0000 UTC m=+55.406397931" Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.833640 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-4lfzc" podStartSLOduration=19.833625516 podStartE2EDuration="19.833625516s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:23.833066502 +0000 UTC m=+55.448468155" watchObservedRunningTime="2025-12-11 14:15:23.833625516 +0000 UTC m=+55.449027159" Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.862891 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:23 crc kubenswrapper[4690]: E1211 14:15:23.863519 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:24.363505972 +0000 UTC m=+55.978907605 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.876906 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:23 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:23 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:23 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.876977 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.964415 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:23 crc kubenswrapper[4690]: E1211 14:15:23.964510 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:24.464492316 +0000 UTC m=+56.079893959 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:23 crc kubenswrapper[4690]: I1211 14:15:23.969727 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:23 crc kubenswrapper[4690]: E1211 14:15:23.970276 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:24.470246632 +0000 UTC m=+56.085648275 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:24 crc kubenswrapper[4690]: W1211 14:15:24.023876 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda3d8ec77_4a18_4938_970d_4d83f834a70e.slice/crio-a35f89b06d4908537445636215b35919df58b83fc57b4fc1836c592b94fb82e5 WatchSource:0}: Error finding container a35f89b06d4908537445636215b35919df58b83fc57b4fc1836c592b94fb82e5: Status 404 returned error can't find the container with id a35f89b06d4908537445636215b35919df58b83fc57b4fc1836c592b94fb82e5 Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.072311 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:24 crc kubenswrapper[4690]: E1211 14:15:24.072485 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:24.572449127 +0000 UTC m=+56.187850770 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.072762 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:24 crc kubenswrapper[4690]: E1211 14:15:24.073134 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:24.573122144 +0000 UTC m=+56.188523787 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.174718 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:24 crc kubenswrapper[4690]: E1211 14:15:24.175023 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:24.67492834 +0000 UTC m=+56.290330003 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.175357 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:24 crc kubenswrapper[4690]: E1211 14:15:24.175798 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:24.675788632 +0000 UTC m=+56.291190275 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.244533 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-rb4j9" podStartSLOduration=20.24451473 podStartE2EDuration="20.24451473s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:24.189972111 +0000 UTC m=+55.805373764" watchObservedRunningTime="2025-12-11 14:15:24.24451473 +0000 UTC m=+55.859916373" Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.282659 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:24 crc kubenswrapper[4690]: E1211 14:15:24.283134 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:24.783115747 +0000 UTC m=+56.398517390 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.322898 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-clbf8"] Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.397433 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:24 crc kubenswrapper[4690]: E1211 14:15:24.398359 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:24.898342442 +0000 UTC m=+56.513744095 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.416189 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb" podStartSLOduration=20.413485505 podStartE2EDuration="20.413485505s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:24.384648286 +0000 UTC m=+56.000049929" watchObservedRunningTime="2025-12-11 14:15:24.413485505 +0000 UTC m=+56.028887148" Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.441695 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r2jbn"] Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.449639 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr"] Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.456814 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-4b7r8"] Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.499818 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:24 crc kubenswrapper[4690]: E1211 14:15:24.500218 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:25.000201909 +0000 UTC m=+56.615603552 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.500243 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5n4lj"] Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.503880 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57"] Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.522479 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-czjn5" podStartSLOduration=21.522462272 podStartE2EDuration="21.522462272s" podCreationTimestamp="2025-12-11 14:15:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:24.495344226 +0000 UTC m=+56.110745869" watchObservedRunningTime="2025-12-11 14:15:24.522462272 +0000 UTC m=+56.137863915" Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.529680 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-h2qcw" podStartSLOduration=21.529657214 podStartE2EDuration="21.529657214s" podCreationTimestamp="2025-12-11 14:15:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:24.519967949 +0000 UTC m=+56.135369602" watchObservedRunningTime="2025-12-11 14:15:24.529657214 +0000 UTC m=+56.145058857" Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.531085 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-sgjk8"] Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.603304 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:24 crc kubenswrapper[4690]: E1211 14:15:24.603657 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:25.103636656 +0000 UTC m=+56.719038299 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.698880 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" event={"ID":"3f373953-70e7-4d9b-a3a1-cc35e7255c44","Type":"ContainerStarted","Data":"f3634b28c4917595e64e4a96f39ddaaaccf73486c3e07884a23357eb4016815c"} Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.704566 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:24 crc kubenswrapper[4690]: E1211 14:15:24.705117 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:25.205088792 +0000 UTC m=+56.820490455 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.705172 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" event={"ID":"787bb691-ec3b-4dad-868a-3dcd2c33f4e1","Type":"ContainerStarted","Data":"c69bcd89ea4fc0cd893aaae8c8c5ac078200644c1c10caca5f968f81468bc12d"} Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.710090 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5n4lj" event={"ID":"f62b5697-8059-4222-b731-2f8188e9a270","Type":"ContainerStarted","Data":"7a2bd3c4f2bacef82922134005cafadcd5e88d370a2be60823e286dbc8b93a6c"} Dec 11 14:15:24 crc kubenswrapper[4690]: W1211 14:15:24.726294 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf3ec5c48_884c_4a5c_8651_24cd598a9ec2.slice/crio-a229dcc10484c2ee9dfd9a25406c1e71dca39ef69bfe510e2094d4008d89b1c6 WatchSource:0}: Error finding container a229dcc10484c2ee9dfd9a25406c1e71dca39ef69bfe510e2094d4008d89b1c6: Status 404 returned error can't find the container with id a229dcc10484c2ee9dfd9a25406c1e71dca39ef69bfe510e2094d4008d89b1c6 Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.735217 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http 
failed: reason withheld Dec 11 14:15:24 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:24 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:24 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.735263 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.756183 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w" event={"ID":"6568e808-b0c8-4d3a-b454-51b9c810f2a3","Type":"ContainerStarted","Data":"22fc06fac588110c9670d3d021a058996d18df1a760a8cdf65fc19e72cd53f5b"} Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.761072 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9h2v4" event={"ID":"57d5b2c5-8c4a-410b-9acf-94852865a8d6","Type":"ContainerStarted","Data":"7a828ed2776a5dbe1327ff0b8426cd41d6ee7fc8e2ae8b2c1317641468f64e98"} Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.795011 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-9h2v4" podStartSLOduration=20.794994697 podStartE2EDuration="20.794994697s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:24.794535005 +0000 UTC m=+56.409936648" watchObservedRunningTime="2025-12-11 14:15:24.794994697 +0000 UTC m=+56.410396340" Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.805930 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:24 crc kubenswrapper[4690]: E1211 14:15:24.806522 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:25.306506658 +0000 UTC m=+56.921908301 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.810023 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2" event={"ID":"7b031965-e2b9-4d51-b598-9b8366944ee5","Type":"ContainerStarted","Data":"34da874f504766121374500dfc626b329ee0a9ec68b0dc6f7186e112b5de27b1"} Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.829243 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mnd7" event={"ID":"f0fc8c25-f7b2-4f7b-8ea7-323e91557a24","Type":"ContainerStarted","Data":"95687a6796db021579bf27704e7d2e9495faede5053248db78c2e63faee5a6b0"} Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.838309 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4"] Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.839815 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4gxkn" event={"ID":"e8f5b2a0-33a0-46b8-9f75-6d732599e8e1","Type":"ContainerStarted","Data":"59a4466e3ac8223809ec646a8482772a89659e945fc2c29673c4082029f46c73"} Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.843325 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-t9jhb" event={"ID":"080f2e20-451b-4825-89bd-2f35b98158b7","Type":"ContainerStarted","Data":"4c7c1628cbf159f701b03a326ef72808e31be411835b106e039f82d613f79060"} Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.853251 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6wqbb"] Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.854416 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6" event={"ID":"3c1f84bf-fc2e-465a-b3e1-436eb29e50ab","Type":"ContainerStarted","Data":"b33227e78551692217bceb8773b075b7fd6adb0c95c42260e7918ab3c1c7860c"} Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.855878 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ktwn7"] Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.865337 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" event={"ID":"7f1e1cca-4214-4752-8bc4-1972df6de928","Type":"ContainerStarted","Data":"c10bd24b0090c5fb5988ea830e31ea9e02f1bf8ac02da06188ddfd1a9c9c2379"} Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.878099 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-zsqv7" event={"ID":"a3d8ec77-4a18-4938-970d-4d83f834a70e","Type":"ContainerStarted","Data":"a35f89b06d4908537445636215b35919df58b83fc57b4fc1836c592b94fb82e5"} Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.882009 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-clbf8" event={"ID":"2c2d5543-2073-4952-b1b2-071e717d170f","Type":"ContainerStarted","Data":"c77296975fa18257f0ee4628c1f4189dea3b75c987e1e57f4d51fcc8d7fbf44d"} Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.883136 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" event={"ID":"2e221469-9c10-4c08-925b-a4e4e4c3d208","Type":"ContainerStarted","Data":"ada26d4bd244788c3df86e07bb4be161eeb5c23df33915a51c3859e5760187cc"} Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.885090 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.889412 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" event={"ID":"ef32fe73-cf65-4729-9f54-5cffddba74e1","Type":"ContainerStarted","Data":"7f2491e992d0a99230ce9fcdb61842d735db92dbd97d095a74f761fa93260fd9"} Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.894208 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-zrnhb" event={"ID":"24a61e7c-cfd8-4f15-8c3f-512f9b93c7ab","Type":"ContainerStarted","Data":"2efb2ce27fcd1e402850f2bbf121a6af7060c2adc8db8819ec4700be02a1e56c"} Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.908926 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:24 crc kubenswrapper[4690]: E1211 14:15:24.909850 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:25.409835342 +0000 UTC m=+57.025236985 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.911325 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-cbsq8"] Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.919015 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n"] Dec 11 14:15:24 crc kubenswrapper[4690]: I1211 14:15:24.939795 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" podStartSLOduration=20.93977801 podStartE2EDuration="20.93977801s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:24.937994245 +0000 UTC m=+56.553395918" watchObservedRunningTime="2025-12-11 14:15:24.93977801 +0000 UTC m=+56.555179653" Dec 11 14:15:24 crc kubenswrapper[4690]: W1211 14:15:24.982529 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcc1eccc4_fb41_4aa2_8521_c6395edee227.slice/crio-794f63a3703b4ef64459cd69a415b75511021949a8156557f0c7ce5b69d19751 WatchSource:0}: Error finding container 794f63a3703b4ef64459cd69a415b75511021949a8156557f0c7ce5b69d19751: Status 404 returned error can't find the container with id 794f63a3703b4ef64459cd69a415b75511021949a8156557f0c7ce5b69d19751 Dec 11 14:15:24 crc kubenswrapper[4690]: W1211 14:15:24.983917 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeeea8bdc_4b47_487e_bf46_1acbf11fc9bd.slice/crio-9db384463839599cb0699f161b962d1b26e6d4e29919eba2a28eb84161e2f119 WatchSource:0}: Error finding container 9db384463839599cb0699f161b962d1b26e6d4e29919eba2a28eb84161e2f119: Status 404 returned error can't find the container with id 9db384463839599cb0699f161b962d1b26e6d4e29919eba2a28eb84161e2f119 Dec 11 14:15:24 crc kubenswrapper[4690]: W1211 14:15:24.993107 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod80d4e7ca_0dc1_4ed6_a4e6_1cdc052c4e18.slice/crio-0aaede81d391031455514af69929d904c79539d77d95214c0b8a2b9ac49d7982 WatchSource:0}: Error finding container 0aaede81d391031455514af69929d904c79539d77d95214c0b8a2b9ac49d7982: Status 404 returned error can't find the container with id 0aaede81d391031455514af69929d904c79539d77d95214c0b8a2b9ac49d7982 Dec 11 14:15:25 crc kubenswrapper[4690]: W1211 14:15:25.004154 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbb4aacc0_eed5_4747_b97a_8fa71269177f.slice/crio-755d19cfb0a4389ccf99d028f5681acfb739fa8a743ed1091c3b7900e8439e2c WatchSource:0}: Error finding container 755d19cfb0a4389ccf99d028f5681acfb739fa8a743ed1091c3b7900e8439e2c: Status 404 returned error can't find the container with id 
755d19cfb0a4389ccf99d028f5681acfb739fa8a743ed1091c3b7900e8439e2c Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.010185 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:25 crc kubenswrapper[4690]: E1211 14:15:25.014517 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:25.51449518 +0000 UTC m=+57.129896823 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.121564 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:25 crc kubenswrapper[4690]: E1211 14:15:25.121915 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:25.621896857 +0000 UTC m=+57.237298500 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.224758 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:25 crc kubenswrapper[4690]: E1211 14:15:25.226014 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:25.725994451 +0000 UTC m=+57.341396094 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.327624 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:25 crc kubenswrapper[4690]: E1211 14:15:25.328262 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:25.828242978 +0000 UTC m=+57.443644621 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.431120 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.432537 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:25 crc kubenswrapper[4690]: E1211 14:15:25.432882 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:25.932869685 +0000 UTC m=+57.548271328 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.542532 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:25 crc kubenswrapper[4690]: E1211 14:15:25.543538 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:26.043517124 +0000 UTC m=+57.658918777 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.646076 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:25 crc kubenswrapper[4690]: E1211 14:15:25.646705 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:26.146689494 +0000 UTC m=+57.762091147 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.732828 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:25 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:25 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:25 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.733229 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.748631 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:25 crc kubenswrapper[4690]: E1211 14:15:25.748933 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:26.24891521 +0000 UTC m=+57.864316853 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.865831 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:25 crc kubenswrapper[4690]: E1211 14:15:25.866400 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:26.366380852 +0000 UTC m=+57.981782495 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.911357 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" event={"ID":"7406c3ff-2d22-41c7-ae0b-fa8180cc5ede","Type":"ContainerStarted","Data":"4de6a6fd2b98cf7675a72ca8333ba85630da0e9752b86b936d0199afa7d8707b"} Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.918574 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-q2v54" event={"ID":"9dbcf64e-599d-458f-a5cd-92f58deaa813","Type":"ContainerStarted","Data":"33555c160b9682fdb2854d7a06aa91dadd63be7142cd3f60aca137d274674cac"} Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.919369 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-q2v54" Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.932115 4690 patch_prober.go:28] interesting pod/console-operator-58897d9998-q2v54 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.14:8443/readyz\": dial tcp 10.217.0.14:8443: connect: connection refused" start-of-body= Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.932174 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-q2v54" podUID="9dbcf64e-599d-458f-a5cd-92f58deaa813" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.14:8443/readyz\": dial tcp 10.217.0.14:8443: connect: connection refused" Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.960743 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" podStartSLOduration=21.960723629 podStartE2EDuration="21.960723629s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:25.948690655 +0000 UTC m=+57.564092288" watchObservedRunningTime="2025-12-11 14:15:25.960723629 +0000 UTC m=+57.576125272" Dec 11 14:15:25 crc kubenswrapper[4690]: I1211 14:15:25.966976 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:25 crc kubenswrapper[4690]: E1211 14:15:25.968164 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:26.468141717 +0000 UTC m=+58.083543360 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.031617 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mnd7" event={"ID":"f0fc8c25-f7b2-4f7b-8ea7-323e91557a24","Type":"ContainerStarted","Data":"034cadaff32509af44d35804a78508740e4ff7d64e265ee4007ad8d287f02bde"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.072329 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:26 crc kubenswrapper[4690]: E1211 14:15:26.073059 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:26.57301786 +0000 UTC m=+58.188419503 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.074437 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bfp74" event={"ID":"3414e867-9935-42f6-9de9-853252ee06d3","Type":"ContainerStarted","Data":"a03aecf1cb7da4b78cf0807ee130497257df3a98d889de6d9c553d46b3fdf9c4"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.086101 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8l25g" event={"ID":"ff1e4a4f-2591-46a6-b478-7cef472306c3","Type":"ContainerStarted","Data":"2926b2d8e67b0f641037ff8a034553305cb572644a1bf16e9c2191ee1347aa40"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.087888 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8l25g" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.101548 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-6mnd7" podStartSLOduration=22.101532021 podStartE2EDuration="22.101532021s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.100338931 +0000 UTC m=+57.715740574" watchObservedRunningTime="2025-12-11 14:15:26.101532021 +0000 UTC 
m=+57.716933664" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.101984 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-q2v54" podStartSLOduration=22.101978063 podStartE2EDuration="22.101978063s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.053118966 +0000 UTC m=+57.668520609" watchObservedRunningTime="2025-12-11 14:15:26.101978063 +0000 UTC m=+57.717379706" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.115276 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" event={"ID":"00f7dbc0-5b6f-4f74-a4e4-43759758be95","Type":"ContainerStarted","Data":"c7efa6125e6661f92724567c72836d33c207ea4ed35bcb7be157fa116ad5ccbb"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.115335 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" event={"ID":"00f7dbc0-5b6f-4f74-a4e4-43759758be95","Type":"ContainerStarted","Data":"07023e0f01fd7be1116f8f89dcf4d75ea50b93c4a5b45d7fec66cbf5d38a755a"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.116226 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.125009 4690 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-r2jbn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.125068 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.155217 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8l25g" podStartSLOduration=22.155195409 podStartE2EDuration="22.155195409s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.136839654 +0000 UTC m=+57.752241297" watchObservedRunningTime="2025-12-11 14:15:26.155195409 +0000 UTC m=+57.770597052" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.155676 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-bfp74" podStartSLOduration=22.155671021 podStartE2EDuration="22.155671021s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.153685981 +0000 UTC m=+57.769087634" watchObservedRunningTime="2025-12-11 14:15:26.155671021 +0000 UTC m=+57.771072664" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.173352 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-dns-operator/dns-operator-744455d44c-z8tth" event={"ID":"0fd85a56-df39-46c5-82a5-808a37a3dc8e","Type":"ContainerStarted","Data":"d9aaaab6434ed171b24ccd88a50e9eaf6a01a93a1d1f106b806370f3d4dd610e"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.173846 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:26 crc kubenswrapper[4690]: E1211 14:15:26.175178 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:26.675158334 +0000 UTC m=+58.290559977 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.185122 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" podStartSLOduration=22.185096225 podStartE2EDuration="22.185096225s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.184453039 +0000 UTC m=+57.799854682" watchObservedRunningTime="2025-12-11 14:15:26.185096225 +0000 UTC m=+57.800497878" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.201768 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" event={"ID":"3f373953-70e7-4d9b-a3a1-cc35e7255c44","Type":"ContainerStarted","Data":"5b5cdf605a6e58ba7d018dd67aa6df394839825b40622538c165d79c28e3eda8"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.207648 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" event={"ID":"5f673e58-d96d-4194-8c5e-53be672a7996","Type":"ContainerStarted","Data":"fb15e149e4dbf458907ab8f4d755c37db704d29cdc201186de735ef73394129a"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.209242 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-t9jhb" event={"ID":"080f2e20-451b-4825-89bd-2f35b98158b7","Type":"ContainerStarted","Data":"c15df7e21c4cd31f5fc6c67f7733465ce052f1c466c003babb159ca5af108110"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.220011 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zl7lm" event={"ID":"f5c5595c-6c47-401a-8067-a02411ef722b","Type":"ContainerStarted","Data":"7775e9b6b9cae05f2680a188f551e58bc3da91b95ddcdc6a3e6f66181b6c852e"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.231033 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" podStartSLOduration=22.231009047 podStartE2EDuration="22.231009047s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.227343424 +0000 UTC m=+57.842745067" watchObservedRunningTime="2025-12-11 14:15:26.231009047 +0000 UTC m=+57.846410700" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.256500 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cbsq8" event={"ID":"80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18","Type":"ContainerStarted","Data":"0aaede81d391031455514af69929d904c79539d77d95214c0b8a2b9ac49d7982"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.276522 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:26 crc kubenswrapper[4690]: E1211 14:15:26.280155 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:26.78013278 +0000 UTC m=+58.395534423 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.296472 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zl7lm" podStartSLOduration=22.296437942 podStartE2EDuration="22.296437942s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.279501704 +0000 UTC m=+57.894903347" watchObservedRunningTime="2025-12-11 14:15:26.296437942 +0000 UTC m=+57.911839585" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.305911 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-4b7r8" event={"ID":"f3ec5c48-884c-4a5c-8651-24cd598a9ec2","Type":"ContainerStarted","Data":"5ff919557c28ffbd286477bbb5a557ccdf2eb37ad073078f6fb57dfabca8753e"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.305965 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-4b7r8" event={"ID":"f3ec5c48-884c-4a5c-8651-24cd598a9ec2","Type":"ContainerStarted","Data":"a229dcc10484c2ee9dfd9a25406c1e71dca39ef69bfe510e2094d4008d89b1c6"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.315240 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4gxkn" 
event={"ID":"e8f5b2a0-33a0-46b8-9f75-6d732599e8e1","Type":"ContainerStarted","Data":"f70fa36135a3a51940bd71327fef94eb830b803dbcadcbc530f35de0193f94fa"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.324967 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-4b7r8" podStartSLOduration=7.3249320430000004 podStartE2EDuration="7.324932043s" podCreationTimestamp="2025-12-11 14:15:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.324460101 +0000 UTC m=+57.939861744" watchObservedRunningTime="2025-12-11 14:15:26.324932043 +0000 UTC m=+57.940333686" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.328114 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2" event={"ID":"7b031965-e2b9-4d51-b598-9b8366944ee5","Type":"ContainerStarted","Data":"5ff278cbb914176aff0e2c8af08ac896785db48d0f54963267c7986798d774ac"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.330266 4690 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-b4ml2 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" start-of-body= Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.330309 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2" podUID="7b031965-e2b9-4d51-b598-9b8366944ee5" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.33:8443/healthz\": dial tcp 10.217.0.33:8443: connect: connection refused" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.330505 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.371279 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6" event={"ID":"3c1f84bf-fc2e-465a-b3e1-436eb29e50ab","Type":"ContainerStarted","Data":"d4b4b527727e9b6d448de14c5aba63fcd5fb0243def020e150254b5c75b7277f"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.377421 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:26 crc kubenswrapper[4690]: E1211 14:15:26.378705 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:26.878685543 +0000 UTC m=+58.494087186 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.380159 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" event={"ID":"7f1e1cca-4214-4752-8bc4-1972df6de928","Type":"ContainerStarted","Data":"b90bbb15d9dbc0263ba6310d48e56346d2e24e222c29a15cc9dc13b875e332d1"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.381609 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" event={"ID":"ef32fe73-cf65-4729-9f54-5cffddba74e1","Type":"ContainerStarted","Data":"20767f1dc3a7a35867bd14635aaec20712bb53c3eed36d00b57984c17d070f4f"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.385333 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-sgjk8" event={"ID":"807380a2-3571-4626-b33a-71e79bdbf087","Type":"ContainerStarted","Data":"f510a27fa43415ad8d297d6de85bb476371c7b43a1bcf59c969aa70cd14d3b46"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.385367 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-sgjk8" event={"ID":"807380a2-3571-4626-b33a-71e79bdbf087","Type":"ContainerStarted","Data":"e31512ad63faf545ec887bc34c73d0bea0689b35a0bd7e64e8e6d74130515bc5"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.386615 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" event={"ID":"bb4aacc0-eed5-4747-b97a-8fa71269177f","Type":"ContainerStarted","Data":"6ee270181034940d8f80ec74a1076ec0852deb82a6d190fc530614f491329b4e"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.386673 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" event={"ID":"bb4aacc0-eed5-4747-b97a-8fa71269177f","Type":"ContainerStarted","Data":"755d19cfb0a4389ccf99d028f5681acfb739fa8a743ed1091c3b7900e8439e2c"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.386883 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.389024 4690 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-mtc4n container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" start-of-body= Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.389061 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" podUID="bb4aacc0-eed5-4747-b97a-8fa71269177f" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.390705 4690 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-clbf8" event={"ID":"2c2d5543-2073-4952-b1b2-071e717d170f","Type":"ContainerStarted","Data":"1ddcbbbd47dc4bb4526a33175cfcaca621a3ee9e993e6b8bb5a7ef778890a135"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.398807 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-8gzc6" podStartSLOduration=22.398790722 podStartE2EDuration="22.398790722s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.398390751 +0000 UTC m=+58.013792394" watchObservedRunningTime="2025-12-11 14:15:26.398790722 +0000 UTC m=+58.014192365" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.399980 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2" podStartSLOduration=22.399975702 podStartE2EDuration="22.399975702s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.364213197 +0000 UTC m=+57.979614840" watchObservedRunningTime="2025-12-11 14:15:26.399975702 +0000 UTC m=+58.015377345" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.403614 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nmql7" event={"ID":"6cb8be15-fc6f-43f0-865e-96da3547155f","Type":"ContainerStarted","Data":"c9109415bee266602f0a60c269732175dbd9071c91a0c97877c88fb9b801c400"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.412559 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-w2qz9" event={"ID":"17ecb919-7205-4494-b314-db33c07f4bd4","Type":"ContainerStarted","Data":"dc67d4e1914a01c48e26dce2da8490729e33752a53263f73329e18b7e0b6a58a"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.420590 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" event={"ID":"4ecd7785-d164-4998-a361-031cd179f164","Type":"ContainerStarted","Data":"f989ede1a804dd570016c99c4c69b56bb16acb69d1f20f42dcf0da667a3ebe5b"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.430990 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-crxzj" podStartSLOduration=22.430973636 podStartE2EDuration="22.430973636s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.429538299 +0000 UTC m=+58.044939942" watchObservedRunningTime="2025-12-11 14:15:26.430973636 +0000 UTC m=+58.046375279" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.433600 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" event={"ID":"cc1eccc4-fb41-4aa2-8521-c6395edee227","Type":"ContainerStarted","Data":"1f64dfbcad83e3d8dd23d7ae6006ff371357cd572e9c9d4ab6098296a5927a00"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.433634 4690 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" event={"ID":"cc1eccc4-fb41-4aa2-8521-c6395edee227","Type":"ContainerStarted","Data":"794f63a3703b4ef64459cd69a415b75511021949a8156557f0c7ce5b69d19751"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.437651 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-zsqv7" event={"ID":"a3d8ec77-4a18-4938-970d-4d83f834a70e","Type":"ContainerStarted","Data":"c4f3e96092bc56823dd1060117313e1da406c4d41004ac17b2b61322f37bd301"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.439152 4690 generic.go:334] "Generic (PLEG): container finished" podID="6ddc4574-895b-41fa-a8c9-47c4f303d0d9" containerID="818b0946bf9f92c8e3e8d3f84ed116db939ec4ecf730f497ae244248fca2b49d" exitCode=0 Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.439941 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-r56mp" event={"ID":"6ddc4574-895b-41fa-a8c9-47c4f303d0d9","Type":"ContainerDied","Data":"818b0946bf9f92c8e3e8d3f84ed116db939ec4ecf730f497ae244248fca2b49d"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.443649 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" event={"ID":"7731421f-ecdd-4ff8-aa63-79ae9983425f","Type":"ContainerStarted","Data":"dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.444416 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.457513 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-clbf8" podStartSLOduration=22.457499617 podStartE2EDuration="22.457499617s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.45642198 +0000 UTC m=+58.071823623" watchObservedRunningTime="2025-12-11 14:15:26.457499617 +0000 UTC m=+58.072901260" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.458297 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-6wqbb" event={"ID":"eeea8bdc-4b47-487e-bf46-1acbf11fc9bd","Type":"ContainerStarted","Data":"65e79d56e4972c1230c2e01b5b617f46d9bb33936c15a01383307aaa1866af47"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.459376 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.459412 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-6wqbb" event={"ID":"eeea8bdc-4b47-487e-bf46-1acbf11fc9bd","Type":"ContainerStarted","Data":"9db384463839599cb0699f161b962d1b26e6d4e29919eba2a28eb84161e2f119"} Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.479013 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.485195 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w" Dec 11 14:15:26 crc kubenswrapper[4690]: E1211 14:15:26.488496 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:26.988474141 +0000 UTC m=+58.603875844 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.503629 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" podStartSLOduration=22.503611463 podStartE2EDuration="22.503611463s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.500501335 +0000 UTC m=+58.115902978" watchObservedRunningTime="2025-12-11 14:15:26.503611463 +0000 UTC m=+58.119013106" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.514754 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.543529 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-j2v2w" podStartSLOduration=22.543508413 podStartE2EDuration="22.543508413s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.541712457 +0000 UTC m=+58.157114100" watchObservedRunningTime="2025-12-11 14:15:26.543508413 +0000 UTC m=+58.158910056" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.562731 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-w2qz9" podStartSLOduration=7.562708799 podStartE2EDuration="7.562708799s" podCreationTimestamp="2025-12-11 14:15:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.558916423 +0000 UTC m=+58.174318066" watchObservedRunningTime="2025-12-11 14:15:26.562708799 +0000 UTC m=+58.178110442" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.580828 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:26 crc kubenswrapper[4690]: E1211 14:15:26.585024 4690 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:27.085000363 +0000 UTC m=+58.700402076 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.635751 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" podStartSLOduration=23.635729356 podStartE2EDuration="23.635729356s" podCreationTimestamp="2025-12-11 14:15:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.630446772 +0000 UTC m=+58.245848425" watchObservedRunningTime="2025-12-11 14:15:26.635729356 +0000 UTC m=+58.251130999" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.669943 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" podStartSLOduration=7.669917341 podStartE2EDuration="7.669917341s" podCreationTimestamp="2025-12-11 14:15:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.663698283 +0000 UTC m=+58.279099926" watchObservedRunningTime="2025-12-11 14:15:26.669917341 +0000 UTC m=+58.285318984" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.684198 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.685971 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:26 crc kubenswrapper[4690]: E1211 14:15:26.686337 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:27.186322536 +0000 UTC m=+58.801724179 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.694972 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-nmql7" podStartSLOduration=22.694938104 podStartE2EDuration="22.694938104s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.692679967 +0000 UTC m=+58.308081610" watchObservedRunningTime="2025-12-11 14:15:26.694938104 +0000 UTC m=+58.310339747" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.711943 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.728529 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-77vtz" podStartSLOduration=22.728512853 podStartE2EDuration="22.728512853s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.72757802 +0000 UTC m=+58.342979663" watchObservedRunningTime="2025-12-11 14:15:26.728512853 +0000 UTC m=+58.343914496" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.737153 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:26 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:26 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:26 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.737200 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.759326 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.759634 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.789499 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:26 crc kubenswrapper[4690]: E1211 14:15:26.789823 4690 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:27.289778193 +0000 UTC m=+58.905179846 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.789972 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:26 crc kubenswrapper[4690]: E1211 14:15:26.792014 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:27.291994529 +0000 UTC m=+58.907396172 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.796682 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-6wqbb" podStartSLOduration=22.796666047 podStartE2EDuration="22.796666047s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.780095338 +0000 UTC m=+58.395496991" watchObservedRunningTime="2025-12-11 14:15:26.796666047 +0000 UTC m=+58.412067690" Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.891367 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:26 crc kubenswrapper[4690]: E1211 14:15:26.891773 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:27.391752923 +0000 UTC m=+59.007154566 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:26 crc kubenswrapper[4690]: I1211 14:15:26.993057 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:26 crc kubenswrapper[4690]: E1211 14:15:26.993398 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:27.493382634 +0000 UTC m=+59.108784277 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.093825 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:27 crc kubenswrapper[4690]: E1211 14:15:27.094007 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:27.593981629 +0000 UTC m=+59.209383272 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.094089 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:27 crc kubenswrapper[4690]: E1211 14:15:27.094456 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:27.594445821 +0000 UTC m=+59.209847544 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.195151 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:27 crc kubenswrapper[4690]: E1211 14:15:27.195359 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:27.695329393 +0000 UTC m=+59.310731046 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.195447 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:27 crc kubenswrapper[4690]: E1211 14:15:27.195821 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:27.695810625 +0000 UTC m=+59.311212268 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.257912 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=1.257894366 podStartE2EDuration="1.257894366s" podCreationTimestamp="2025-12-11 14:15:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:26.830309479 +0000 UTC m=+58.445711122" watchObservedRunningTime="2025-12-11 14:15:27.257894366 +0000 UTC m=+58.873296009" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.259986 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-8bfnc"] Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.261079 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8bfnc" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.265156 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.274591 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8bfnc"] Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.298470 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:27 crc kubenswrapper[4690]: E1211 14:15:27.298681 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:27.798653577 +0000 UTC m=+59.414055220 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.298750 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:27 crc kubenswrapper[4690]: E1211 14:15:27.299148 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:27.799139159 +0000 UTC m=+59.414540802 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.400284 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:27 crc kubenswrapper[4690]: E1211 14:15:27.400500 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:27.900465073 +0000 UTC m=+59.515866716 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.400558 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.400759 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/646a2a29-480a-4725-9407-80d8a4f2a4bb-catalog-content\") pod \"community-operators-8bfnc\" (UID: \"646a2a29-480a-4725-9407-80d8a4f2a4bb\") " pod="openshift-marketplace/community-operators-8bfnc" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.400802 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/646a2a29-480a-4725-9407-80d8a4f2a4bb-utilities\") pod \"community-operators-8bfnc\" (UID: \"646a2a29-480a-4725-9407-80d8a4f2a4bb\") " pod="openshift-marketplace/community-operators-8bfnc" Dec 11 14:15:27 crc kubenswrapper[4690]: E1211 14:15:27.400924 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:27.900911014 +0000 UTC m=+59.516312657 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.401051 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v57hq\" (UniqueName: \"kubernetes.io/projected/646a2a29-480a-4725-9407-80d8a4f2a4bb-kube-api-access-v57hq\") pod \"community-operators-8bfnc\" (UID: \"646a2a29-480a-4725-9407-80d8a4f2a4bb\") " pod="openshift-marketplace/community-operators-8bfnc" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.456884 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fs97w"] Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.458042 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fs97w" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.460517 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.462338 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" event={"ID":"cc1eccc4-fb41-4aa2-8521-c6395edee227","Type":"ContainerStarted","Data":"a6d6c3de852eb804d1dcc2774e536fe2e47ecc57e8f015b611f873e89dfe2c79"} Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.463930 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cbsq8" event={"ID":"80d4e7ca-0dc1-4ed6-a4e6-1cdc052c4e18","Type":"ContainerStarted","Data":"c4b855a5119a84be0e7c57a264a7e84ed05b6452c82b0758e28d726964035044"} Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.464613 4690 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-r2jbn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.464659 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.464852 4690 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-mtc4n container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" start-of-body= Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.464895 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" podUID="bb4aacc0-eed5-4747-b97a-8fa71269177f" containerName="packageserver" probeResult="failure" output="Get 
\"https://10.217.0.26:5443/healthz\": dial tcp 10.217.0.26:5443: connect: connection refused" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.469020 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-b4ml2" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.502383 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:27 crc kubenswrapper[4690]: E1211 14:15:27.502585 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:28.002553656 +0000 UTC m=+59.617955299 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.502686 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v57hq\" (UniqueName: \"kubernetes.io/projected/646a2a29-480a-4725-9407-80d8a4f2a4bb-kube-api-access-v57hq\") pod \"community-operators-8bfnc\" (UID: \"646a2a29-480a-4725-9407-80d8a4f2a4bb\") " pod="openshift-marketplace/community-operators-8bfnc" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.502739 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.502834 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/646a2a29-480a-4725-9407-80d8a4f2a4bb-catalog-content\") pod \"community-operators-8bfnc\" (UID: \"646a2a29-480a-4725-9407-80d8a4f2a4bb\") " pod="openshift-marketplace/community-operators-8bfnc" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.502878 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/646a2a29-480a-4725-9407-80d8a4f2a4bb-utilities\") pod \"community-operators-8bfnc\" (UID: \"646a2a29-480a-4725-9407-80d8a4f2a4bb\") " pod="openshift-marketplace/community-operators-8bfnc" Dec 11 14:15:27 crc kubenswrapper[4690]: E1211 14:15:27.503245 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:28.003225763 +0000 UTC m=+59.618627406 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.503359 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/646a2a29-480a-4725-9407-80d8a4f2a4bb-utilities\") pod \"community-operators-8bfnc\" (UID: \"646a2a29-480a-4725-9407-80d8a4f2a4bb\") " pod="openshift-marketplace/community-operators-8bfnc" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.503399 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/646a2a29-480a-4725-9407-80d8a4f2a4bb-catalog-content\") pod \"community-operators-8bfnc\" (UID: \"646a2a29-480a-4725-9407-80d8a4f2a4bb\") " pod="openshift-marketplace/community-operators-8bfnc" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.508714 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fs97w"] Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.526731 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v57hq\" (UniqueName: \"kubernetes.io/projected/646a2a29-480a-4725-9407-80d8a4f2a4bb-kube-api-access-v57hq\") pod \"community-operators-8bfnc\" (UID: \"646a2a29-480a-4725-9407-80d8a4f2a4bb\") " pod="openshift-marketplace/community-operators-8bfnc" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.576893 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8bfnc" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.578256 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-vl4d4"] Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.604478 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:27 crc kubenswrapper[4690]: E1211 14:15:27.604689 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:28.104657778 +0000 UTC m=+59.720059441 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.605281 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.605413 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-knccb\" (UniqueName: \"kubernetes.io/projected/8cd40416-92d2-41ec-b6ae-ba668ccc5685-kube-api-access-knccb\") pod \"certified-operators-fs97w\" (UID: \"8cd40416-92d2-41ec-b6ae-ba668ccc5685\") " pod="openshift-marketplace/certified-operators-fs97w" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.605461 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd40416-92d2-41ec-b6ae-ba668ccc5685-utilities\") pod \"certified-operators-fs97w\" (UID: \"8cd40416-92d2-41ec-b6ae-ba668ccc5685\") " pod="openshift-marketplace/certified-operators-fs97w" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.605626 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd40416-92d2-41ec-b6ae-ba668ccc5685-catalog-content\") pod \"certified-operators-fs97w\" (UID: \"8cd40416-92d2-41ec-b6ae-ba668ccc5685\") " pod="openshift-marketplace/certified-operators-fs97w" Dec 11 14:15:27 crc kubenswrapper[4690]: E1211 14:15:27.609993 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:28.109977542 +0000 UTC m=+59.725379295 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.652604 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kmjlt"] Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.653535 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kmjlt" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.670716 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kmjlt"] Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.706659 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.706894 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd40416-92d2-41ec-b6ae-ba668ccc5685-catalog-content\") pod \"certified-operators-fs97w\" (UID: \"8cd40416-92d2-41ec-b6ae-ba668ccc5685\") " pod="openshift-marketplace/certified-operators-fs97w" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.707049 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-knccb\" (UniqueName: \"kubernetes.io/projected/8cd40416-92d2-41ec-b6ae-ba668ccc5685-kube-api-access-knccb\") pod \"certified-operators-fs97w\" (UID: \"8cd40416-92d2-41ec-b6ae-ba668ccc5685\") " pod="openshift-marketplace/certified-operators-fs97w" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.707075 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd40416-92d2-41ec-b6ae-ba668ccc5685-utilities\") pod \"certified-operators-fs97w\" (UID: \"8cd40416-92d2-41ec-b6ae-ba668ccc5685\") " pod="openshift-marketplace/certified-operators-fs97w" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.707569 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd40416-92d2-41ec-b6ae-ba668ccc5685-utilities\") pod \"certified-operators-fs97w\" (UID: \"8cd40416-92d2-41ec-b6ae-ba668ccc5685\") " pod="openshift-marketplace/certified-operators-fs97w" Dec 11 14:15:27 crc kubenswrapper[4690]: E1211 14:15:27.707654 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:28.207636073 +0000 UTC m=+59.823037716 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.707894 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd40416-92d2-41ec-b6ae-ba668ccc5685-catalog-content\") pod \"certified-operators-fs97w\" (UID: \"8cd40416-92d2-41ec-b6ae-ba668ccc5685\") " pod="openshift-marketplace/certified-operators-fs97w" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.732748 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-knccb\" (UniqueName: \"kubernetes.io/projected/8cd40416-92d2-41ec-b6ae-ba668ccc5685-kube-api-access-knccb\") pod \"certified-operators-fs97w\" (UID: \"8cd40416-92d2-41ec-b6ae-ba668ccc5685\") " pod="openshift-marketplace/certified-operators-fs97w" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.733648 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:27 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:27 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:27 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.733683 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.777615 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fs97w" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.808079 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.808128 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j96wl\" (UniqueName: \"kubernetes.io/projected/907b18d4-d2b5-47c9-9c70-716bd64330ae-kube-api-access-j96wl\") pod \"community-operators-kmjlt\" (UID: \"907b18d4-d2b5-47c9-9c70-716bd64330ae\") " pod="openshift-marketplace/community-operators-kmjlt" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.808197 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/907b18d4-d2b5-47c9-9c70-716bd64330ae-utilities\") pod \"community-operators-kmjlt\" (UID: \"907b18d4-d2b5-47c9-9c70-716bd64330ae\") " pod="openshift-marketplace/community-operators-kmjlt" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.808239 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/907b18d4-d2b5-47c9-9c70-716bd64330ae-catalog-content\") pod \"community-operators-kmjlt\" (UID: \"907b18d4-d2b5-47c9-9c70-716bd64330ae\") " pod="openshift-marketplace/community-operators-kmjlt" Dec 11 14:15:27 crc kubenswrapper[4690]: E1211 14:15:27.808532 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:28.308517385 +0000 UTC m=+59.923919018 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.823298 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-8bfnc"] Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.856737 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-djmxn"] Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.858731 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-djmxn" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.867448 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-djmxn"] Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.909188 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:27 crc kubenswrapper[4690]: E1211 14:15:27.909988 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:28.409969662 +0000 UTC m=+60.025371305 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.910155 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.910195 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j96wl\" (UniqueName: \"kubernetes.io/projected/907b18d4-d2b5-47c9-9c70-716bd64330ae-kube-api-access-j96wl\") pod \"community-operators-kmjlt\" (UID: \"907b18d4-d2b5-47c9-9c70-716bd64330ae\") " pod="openshift-marketplace/community-operators-kmjlt" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.910288 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/907b18d4-d2b5-47c9-9c70-716bd64330ae-utilities\") pod \"community-operators-kmjlt\" (UID: \"907b18d4-d2b5-47c9-9c70-716bd64330ae\") " pod="openshift-marketplace/community-operators-kmjlt" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.910323 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/907b18d4-d2b5-47c9-9c70-716bd64330ae-catalog-content\") pod \"community-operators-kmjlt\" (UID: \"907b18d4-d2b5-47c9-9c70-716bd64330ae\") " pod="openshift-marketplace/community-operators-kmjlt" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.910707 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/907b18d4-d2b5-47c9-9c70-716bd64330ae-catalog-content\") pod \"community-operators-kmjlt\" (UID: \"907b18d4-d2b5-47c9-9c70-716bd64330ae\") " pod="openshift-marketplace/community-operators-kmjlt" Dec 11 14:15:27 crc 
kubenswrapper[4690]: E1211 14:15:27.910926 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:28.410918666 +0000 UTC m=+60.026320309 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.911375 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/907b18d4-d2b5-47c9-9c70-716bd64330ae-utilities\") pod \"community-operators-kmjlt\" (UID: \"907b18d4-d2b5-47c9-9c70-716bd64330ae\") " pod="openshift-marketplace/community-operators-kmjlt" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.954934 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j96wl\" (UniqueName: \"kubernetes.io/projected/907b18d4-d2b5-47c9-9c70-716bd64330ae-kube-api-access-j96wl\") pod \"community-operators-kmjlt\" (UID: \"907b18d4-d2b5-47c9-9c70-716bd64330ae\") " pod="openshift-marketplace/community-operators-kmjlt" Dec 11 14:15:27 crc kubenswrapper[4690]: I1211 14:15:27.986468 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kmjlt" Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.012724 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.012971 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fca9021-9a9f-4d5d-9892-37f39c580323-catalog-content\") pod \"certified-operators-djmxn\" (UID: \"9fca9021-9a9f-4d5d-9892-37f39c580323\") " pod="openshift-marketplace/certified-operators-djmxn" Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.013017 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klnm2\" (UniqueName: \"kubernetes.io/projected/9fca9021-9a9f-4d5d-9892-37f39c580323-kube-api-access-klnm2\") pod \"certified-operators-djmxn\" (UID: \"9fca9021-9a9f-4d5d-9892-37f39c580323\") " pod="openshift-marketplace/certified-operators-djmxn" Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.013084 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fca9021-9a9f-4d5d-9892-37f39c580323-utilities\") pod \"certified-operators-djmxn\" (UID: \"9fca9021-9a9f-4d5d-9892-37f39c580323\") " pod="openshift-marketplace/certified-operators-djmxn" Dec 11 14:15:28 crc kubenswrapper[4690]: E1211 14:15:28.013184 4690 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:28.513167892 +0000 UTC m=+60.128569535 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.017784 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-q2v54" Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.115759 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fca9021-9a9f-4d5d-9892-37f39c580323-catalog-content\") pod \"certified-operators-djmxn\" (UID: \"9fca9021-9a9f-4d5d-9892-37f39c580323\") " pod="openshift-marketplace/certified-operators-djmxn" Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.115865 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:28 crc kubenswrapper[4690]: E1211 14:15:28.116168 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:28.616156978 +0000 UTC m=+60.231558621 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.116311 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klnm2\" (UniqueName: \"kubernetes.io/projected/9fca9021-9a9f-4d5d-9892-37f39c580323-kube-api-access-klnm2\") pod \"certified-operators-djmxn\" (UID: \"9fca9021-9a9f-4d5d-9892-37f39c580323\") " pod="openshift-marketplace/certified-operators-djmxn" Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.116385 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fca9021-9a9f-4d5d-9892-37f39c580323-utilities\") pod \"certified-operators-djmxn\" (UID: \"9fca9021-9a9f-4d5d-9892-37f39c580323\") " pod="openshift-marketplace/certified-operators-djmxn" Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.116644 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fca9021-9a9f-4d5d-9892-37f39c580323-catalog-content\") pod \"certified-operators-djmxn\" (UID: \"9fca9021-9a9f-4d5d-9892-37f39c580323\") " pod="openshift-marketplace/certified-operators-djmxn" Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.116767 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fca9021-9a9f-4d5d-9892-37f39c580323-utilities\") pod \"certified-operators-djmxn\" (UID: \"9fca9021-9a9f-4d5d-9892-37f39c580323\") " pod="openshift-marketplace/certified-operators-djmxn" Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.148751 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klnm2\" (UniqueName: \"kubernetes.io/projected/9fca9021-9a9f-4d5d-9892-37f39c580323-kube-api-access-klnm2\") pod \"certified-operators-djmxn\" (UID: \"9fca9021-9a9f-4d5d-9892-37f39c580323\") " pod="openshift-marketplace/certified-operators-djmxn" Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.188337 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fs97w"] Dec 11 14:15:28 crc kubenswrapper[4690]: W1211 14:15:28.197494 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8cd40416_92d2_41ec_b6ae_ba668ccc5685.slice/crio-a834198ed9baf9d6ce8a33206ca0e602171592f634be7c55342b91fc3484b89b WatchSource:0}: Error finding container a834198ed9baf9d6ce8a33206ca0e602171592f634be7c55342b91fc3484b89b: Status 404 returned error can't find the container with id a834198ed9baf9d6ce8a33206ca0e602171592f634be7c55342b91fc3484b89b Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.217600 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:28 crc 
kubenswrapper[4690]: E1211 14:15:28.217932 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:28.717916272 +0000 UTC m=+60.333317915 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.224275 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-djmxn" Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.320802 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:28 crc kubenswrapper[4690]: E1211 14:15:28.321210 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:28.821192175 +0000 UTC m=+60.436593818 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.326497 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kmjlt"] Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.421996 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:28 crc kubenswrapper[4690]: E1211 14:15:28.422176 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:28.922153889 +0000 UTC m=+60.537555532 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.422390 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:28 crc kubenswrapper[4690]: E1211 14:15:28.422806 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:28.922796296 +0000 UTC m=+60.538197939 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.445426 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-djmxn"] Dec 11 14:15:28 crc kubenswrapper[4690]: W1211 14:15:28.452409 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9fca9021_9a9f_4d5d_9892_37f39c580323.slice/crio-bf7869d3bb518156bdb2191434241ee80efca3e2a95f832cc697029db8462933 WatchSource:0}: Error finding container bf7869d3bb518156bdb2191434241ee80efca3e2a95f832cc697029db8462933: Status 404 returned error can't find the container with id bf7869d3bb518156bdb2191434241ee80efca3e2a95f832cc697029db8462933 Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.469051 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djmxn" event={"ID":"9fca9021-9a9f-4d5d-9892-37f39c580323","Type":"ContainerStarted","Data":"bf7869d3bb518156bdb2191434241ee80efca3e2a95f832cc697029db8462933"} Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.470147 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fs97w" event={"ID":"8cd40416-92d2-41ec-b6ae-ba668ccc5685","Type":"ContainerStarted","Data":"a834198ed9baf9d6ce8a33206ca0e602171592f634be7c55342b91fc3484b89b"} Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.471387 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bfnc" event={"ID":"646a2a29-480a-4725-9407-80d8a4f2a4bb","Type":"ContainerStarted","Data":"5c48c22f578c7e27e1350aea0956820eda8ad6c16458898c8c74ccb8b7d8864e"} Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.473214 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4gxkn" event={"ID":"e8f5b2a0-33a0-46b8-9f75-6d732599e8e1","Type":"ContainerStarted","Data":"fb2e49e250013db8ab8b229530882fa31acb0d053ee0289199bd18fd95bcd86c"} Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.474584 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kmjlt" event={"ID":"907b18d4-d2b5-47c9-9c70-716bd64330ae","Type":"ContainerStarted","Data":"9630c8bab4f380ae02382d3f978e6a624f0ce681a6207be9eadedb4f64916ce7"} Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.475995 4690 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-r2jbn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.476151 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.483115 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-8l25g" Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.523467 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:28 crc kubenswrapper[4690]: E1211 14:15:28.523888 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:29.023872713 +0000 UTC m=+60.639274356 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.988253 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:28 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:28 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:28 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.988734 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:28 crc kubenswrapper[4690]: I1211 14:15:28.993326 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:29 crc kubenswrapper[4690]: E1211 14:15:29.028429 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:29.528413417 +0000 UTC m=+61.143815060 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.097381 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:29 crc kubenswrapper[4690]: E1211 14:15:29.097797 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:29.597774542 +0000 UTC m=+61.213176185 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.129144 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.201388 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:29 crc kubenswrapper[4690]: E1211 14:15:29.201769 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:29.701756693 +0000 UTC m=+61.317158326 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.280554 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-k2lrt"] Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.281806 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k2lrt" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.285815 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k2lrt"] Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.297017 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.302817 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.303114 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwgxp\" (UniqueName: \"kubernetes.io/projected/213bdee3-5fb6-4221-819e-43ec7a01f555-kube-api-access-pwgxp\") pod \"redhat-marketplace-k2lrt\" (UID: \"213bdee3-5fb6-4221-819e-43ec7a01f555\") " pod="openshift-marketplace/redhat-marketplace-k2lrt" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.303196 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/213bdee3-5fb6-4221-819e-43ec7a01f555-utilities\") pod \"redhat-marketplace-k2lrt\" (UID: \"213bdee3-5fb6-4221-819e-43ec7a01f555\") " pod="openshift-marketplace/redhat-marketplace-k2lrt" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.303267 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/213bdee3-5fb6-4221-819e-43ec7a01f555-catalog-content\") pod \"redhat-marketplace-k2lrt\" (UID: \"213bdee3-5fb6-4221-819e-43ec7a01f555\") " pod="openshift-marketplace/redhat-marketplace-k2lrt" Dec 11 14:15:29 crc kubenswrapper[4690]: E1211 14:15:29.303412 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:29.803392244 +0000 UTC m=+61.418793887 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.404938 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwgxp\" (UniqueName: \"kubernetes.io/projected/213bdee3-5fb6-4221-819e-43ec7a01f555-kube-api-access-pwgxp\") pod \"redhat-marketplace-k2lrt\" (UID: \"213bdee3-5fb6-4221-819e-43ec7a01f555\") " pod="openshift-marketplace/redhat-marketplace-k2lrt" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.405417 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/213bdee3-5fb6-4221-819e-43ec7a01f555-utilities\") pod \"redhat-marketplace-k2lrt\" (UID: \"213bdee3-5fb6-4221-819e-43ec7a01f555\") " pod="openshift-marketplace/redhat-marketplace-k2lrt" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.405457 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.405524 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/213bdee3-5fb6-4221-819e-43ec7a01f555-catalog-content\") pod \"redhat-marketplace-k2lrt\" (UID: \"213bdee3-5fb6-4221-819e-43ec7a01f555\") " pod="openshift-marketplace/redhat-marketplace-k2lrt" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.406153 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/213bdee3-5fb6-4221-819e-43ec7a01f555-catalog-content\") pod \"redhat-marketplace-k2lrt\" (UID: \"213bdee3-5fb6-4221-819e-43ec7a01f555\") " pod="openshift-marketplace/redhat-marketplace-k2lrt" Dec 11 14:15:29 crc kubenswrapper[4690]: E1211 14:15:29.406449 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:29.906433631 +0000 UTC m=+61.521835274 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.406641 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/213bdee3-5fb6-4221-819e-43ec7a01f555-utilities\") pod \"redhat-marketplace-k2lrt\" (UID: \"213bdee3-5fb6-4221-819e-43ec7a01f555\") " pod="openshift-marketplace/redhat-marketplace-k2lrt" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.440018 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwgxp\" (UniqueName: \"kubernetes.io/projected/213bdee3-5fb6-4221-819e-43ec7a01f555-kube-api-access-pwgxp\") pod \"redhat-marketplace-k2lrt\" (UID: \"213bdee3-5fb6-4221-819e-43ec7a01f555\") " pod="openshift-marketplace/redhat-marketplace-k2lrt" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.505980 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:29 crc kubenswrapper[4690]: E1211 14:15:29.506417 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:30.00640104 +0000 UTC m=+61.621802683 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.519838 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k2lrt" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.520710 4690 generic.go:334] "Generic (PLEG): container finished" podID="646a2a29-480a-4725-9407-80d8a4f2a4bb" containerID="fe51b94909bb6b47451400c94b18c317719c35854a48dc2f0b09bc571e58380c" exitCode=0 Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.520767 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bfnc" event={"ID":"646a2a29-480a-4725-9407-80d8a4f2a4bb","Type":"ContainerDied","Data":"fe51b94909bb6b47451400c94b18c317719c35854a48dc2f0b09bc571e58380c"} Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.523981 4690 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.548146 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-zsqv7" event={"ID":"a3d8ec77-4a18-4938-970d-4d83f834a70e","Type":"ContainerStarted","Data":"9d7827c609acaa276efe8d3539b7a02c352bd94ff4e95942d750179dcccfa680"} Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.558188 4690 generic.go:334] "Generic (PLEG): container finished" podID="9fca9021-9a9f-4d5d-9892-37f39c580323" containerID="cf7e1f734f99c12908234fbcb5581753a863e60d4b305ad4599bf66401789e60" exitCode=0 Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.558867 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djmxn" event={"ID":"9fca9021-9a9f-4d5d-9892-37f39c580323","Type":"ContainerDied","Data":"cf7e1f734f99c12908234fbcb5581753a863e60d4b305ad4599bf66401789e60"} Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.596485 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-t9jhb" event={"ID":"080f2e20-451b-4825-89bd-2f35b98158b7","Type":"ContainerStarted","Data":"921bc35f030bff1e01fc8c3655d7bb0f12f0d84a534909c490a8d97aa817c2a5"} Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.597202 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-t9jhb" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.607975 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:29 crc kubenswrapper[4690]: E1211 14:15:29.610634 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:30.110619936 +0000 UTC m=+61.726021579 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.623495 4690 generic.go:334] "Generic (PLEG): container finished" podID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" containerID="7d1779c6af101abec489772c87b0d83f77b012c22810a65719fce0232af65a3d" exitCode=0 Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.623604 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fs97w" event={"ID":"8cd40416-92d2-41ec-b6ae-ba668ccc5685","Type":"ContainerDied","Data":"7d1779c6af101abec489772c87b0d83f77b012c22810a65719fce0232af65a3d"} Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.686313 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-z8tth" event={"ID":"0fd85a56-df39-46c5-82a5-808a37a3dc8e","Type":"ContainerStarted","Data":"e5caa685b6d56158ed448fc944c6b543c568f4a333f539846b9b4de40ef53a13"} Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.708449 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-r7754"] Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.709578 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:29 crc kubenswrapper[4690]: E1211 14:15:29.714918 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:30.214893564 +0000 UTC m=+61.830295207 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.717743 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r7754" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.730611 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r7754"] Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.737308 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-r56mp" event={"ID":"6ddc4574-895b-41fa-a8c9-47c4f303d0d9","Type":"ContainerStarted","Data":"0b257fc1c478e304aabc26630689902faed9bef430363f67bfd120fe26a099b4"} Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.750277 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5n4lj" event={"ID":"f62b5697-8059-4222-b731-2f8188e9a270","Type":"ContainerStarted","Data":"515ab1e1fe6852f77e3469c830bcecb8147555780e81119f7aed56854ece3649"} Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.758276 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:29 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:29 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:29 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.758673 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.767487 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-sgjk8" event={"ID":"807380a2-3571-4626-b33a-71e79bdbf087","Type":"ContainerStarted","Data":"8baf6162560c928cdf56e299f2de8816a1f3c3e057727e950fa6f2db700df8f6"} Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.767533 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-sgjk8" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.801539 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-zsqv7" podStartSLOduration=25.801495295 podStartE2EDuration="25.801495295s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:29.747620112 +0000 UTC m=+61.363021755" watchObservedRunningTime="2025-12-11 14:15:29.801495295 +0000 UTC m=+61.416896938" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.810534 4690 generic.go:334] "Generic (PLEG): container finished" podID="907b18d4-d2b5-47c9-9c70-716bd64330ae" containerID="4d7a042256371098662d65e6df7b034e5322a4d1e6e2ea8586b4a5913b124f5b" exitCode=0 Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.810650 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kmjlt" event={"ID":"907b18d4-d2b5-47c9-9c70-716bd64330ae","Type":"ContainerDied","Data":"4d7a042256371098662d65e6df7b034e5322a4d1e6e2ea8586b4a5913b124f5b"} Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.816002 
4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffb685f2-c2fc-4602-8d81-5f11b6581f29-utilities\") pod \"redhat-marketplace-r7754\" (UID: \"ffb685f2-c2fc-4602-8d81-5f11b6581f29\") " pod="openshift-marketplace/redhat-marketplace-r7754" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.816168 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.817186 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tc9w\" (UniqueName: \"kubernetes.io/projected/ffb685f2-c2fc-4602-8d81-5f11b6581f29-kube-api-access-8tc9w\") pod \"redhat-marketplace-r7754\" (UID: \"ffb685f2-c2fc-4602-8d81-5f11b6581f29\") " pod="openshift-marketplace/redhat-marketplace-r7754" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.817331 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffb685f2-c2fc-4602-8d81-5f11b6581f29-catalog-content\") pod \"redhat-marketplace-r7754\" (UID: \"ffb685f2-c2fc-4602-8d81-5f11b6581f29\") " pod="openshift-marketplace/redhat-marketplace-r7754" Dec 11 14:15:29 crc kubenswrapper[4690]: E1211 14:15:29.819175 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:30.319163582 +0000 UTC m=+61.934565225 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.819448 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-z8tth" podStartSLOduration=25.819437089 podStartE2EDuration="25.819437089s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:29.817250824 +0000 UTC m=+61.432652467" watchObservedRunningTime="2025-12-11 14:15:29.819437089 +0000 UTC m=+61.434838742" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.816786 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" event={"ID":"5f673e58-d96d-4194-8c5e-53be672a7996","Type":"ContainerStarted","Data":"6e1021211b7920f8f026c108505da6edfc48d1f27acd761c17016e97dbeaa184"} Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.843503 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" event={"ID":"ef32fe73-cf65-4729-9f54-5cffddba74e1","Type":"ContainerStarted","Data":"68384f35fcb5a6471f74e4799cdd276d6c38ab106161ccd1ff68b3c5169f7115"} Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.843990 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" podUID="7731421f-ecdd-4ff8-aa63-79ae9983425f" containerName="kube-multus-additional-cni-plugins" containerID="cri-o://dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" gracePeriod=30 Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.888291 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-j9qmk" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.921555 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.921876 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffb685f2-c2fc-4602-8d81-5f11b6581f29-catalog-content\") pod \"redhat-marketplace-r7754\" (UID: \"ffb685f2-c2fc-4602-8d81-5f11b6581f29\") " pod="openshift-marketplace/redhat-marketplace-r7754" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.921910 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 
14:15:29.922146 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.922457 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffb685f2-c2fc-4602-8d81-5f11b6581f29-utilities\") pod \"redhat-marketplace-r7754\" (UID: \"ffb685f2-c2fc-4602-8d81-5f11b6581f29\") " pod="openshift-marketplace/redhat-marketplace-r7754" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.922519 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tc9w\" (UniqueName: \"kubernetes.io/projected/ffb685f2-c2fc-4602-8d81-5f11b6581f29-kube-api-access-8tc9w\") pod \"redhat-marketplace-r7754\" (UID: \"ffb685f2-c2fc-4602-8d81-5f11b6581f29\") " pod="openshift-marketplace/redhat-marketplace-r7754" Dec 11 14:15:29 crc kubenswrapper[4690]: E1211 14:15:29.923185 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:30.423165503 +0000 UTC m=+62.038567216 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.926974 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffb685f2-c2fc-4602-8d81-5f11b6581f29-utilities\") pod \"redhat-marketplace-r7754\" (UID: \"ffb685f2-c2fc-4602-8d81-5f11b6581f29\") " pod="openshift-marketplace/redhat-marketplace-r7754" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.928633 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffb685f2-c2fc-4602-8d81-5f11b6581f29-catalog-content\") pod \"redhat-marketplace-r7754\" (UID: \"ffb685f2-c2fc-4602-8d81-5f11b6581f29\") " pod="openshift-marketplace/redhat-marketplace-r7754" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.935585 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.944314 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.977759 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: 
\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.987922 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tc9w\" (UniqueName: \"kubernetes.io/projected/ffb685f2-c2fc-4602-8d81-5f11b6581f29-kube-api-access-8tc9w\") pod \"redhat-marketplace-r7754\" (UID: \"ffb685f2-c2fc-4602-8d81-5f11b6581f29\") " pod="openshift-marketplace/redhat-marketplace-r7754" Dec 11 14:15:29 crc kubenswrapper[4690]: I1211 14:15:29.988396 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-sgjk8" podStartSLOduration=25.988373853 podStartE2EDuration="25.988373853s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:29.987300756 +0000 UTC m=+61.602702399" watchObservedRunningTime="2025-12-11 14:15:29.988373853 +0000 UTC m=+61.603775496" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.018793 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-t9jhb" podStartSLOduration=11.018059874 podStartE2EDuration="11.018059874s" podCreationTimestamp="2025-12-11 14:15:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:30.011612201 +0000 UTC m=+61.627013844" watchObservedRunningTime="2025-12-11 14:15:30.018059874 +0000 UTC m=+61.633461517" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.023943 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.024487 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.024673 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:15:30 crc kubenswrapper[4690]: E1211 14:15:30.024940 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:30.524916807 +0000 UTC m=+62.140318540 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.030437 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.043652 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.053967 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.064222 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5n4lj" podStartSLOduration=26.064202921 podStartE2EDuration="26.064202921s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:30.061216816 +0000 UTC m=+61.676618459" watchObservedRunningTime="2025-12-11 14:15:30.064202921 +0000 UTC m=+61.679604564" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.073074 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.102162 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r7754" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.104558 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-4gxkn" podStartSLOduration=26.104540112 podStartE2EDuration="26.104540112s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:30.102301385 +0000 UTC m=+61.717703028" watchObservedRunningTime="2025-12-11 14:15:30.104540112 +0000 UTC m=+61.719941755" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.109085 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.125279 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:30 crc kubenswrapper[4690]: E1211 14:15:30.126499 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:30.626481807 +0000 UTC m=+62.241883450 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.175159 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-cbsq8" podStartSLOduration=26.175139838 podStartE2EDuration="26.175139838s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:30.162291653 +0000 UTC m=+61.777693296" watchObservedRunningTime="2025-12-11 14:15:30.175139838 +0000 UTC m=+61.790541471" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.201820 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-r5xkr" podStartSLOduration=26.201795842 podStartE2EDuration="26.201795842s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:30.194447706 +0000 UTC m=+61.809849349" watchObservedRunningTime="2025-12-11 14:15:30.201795842 +0000 UTC m=+61.817197485" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.234783 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:30 crc kubenswrapper[4690]: E1211 14:15:30.235166 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:30.735153396 +0000 UTC m=+62.350555039 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.252287 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.277298 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xp8q4" podStartSLOduration=26.277275182 podStartE2EDuration="26.277275182s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:30.2740523 +0000 UTC m=+61.889453963" watchObservedRunningTime="2025-12-11 14:15:30.277275182 +0000 UTC m=+61.892676825" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.298109 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k2lrt"] Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.336130 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.337098 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:30 crc kubenswrapper[4690]: E1211 14:15:30.337383 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:30.837362482 +0000 UTC m=+62.452764125 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.402344 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.441892 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:30 crc kubenswrapper[4690]: E1211 14:15:30.442298 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:30.942282666 +0000 UTC m=+62.557684309 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.546453 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:30 crc kubenswrapper[4690]: E1211 14:15:30.546814 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:31.04679871 +0000 UTC m=+62.662200353 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.648152 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:30 crc kubenswrapper[4690]: E1211 14:15:30.648508 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:31.148494113 +0000 UTC m=+62.763895756 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.674617 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-24xfd"] Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.675801 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-24xfd" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.681346 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.691368 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-24xfd"] Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.732681 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:30 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:30 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:30 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.733145 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.749647 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.749921 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxxrx\" (UniqueName: \"kubernetes.io/projected/0c61312d-523c-44a8-a451-dc96bba0f6d7-kube-api-access-wxxrx\") pod \"redhat-operators-24xfd\" (UID: \"0c61312d-523c-44a8-a451-dc96bba0f6d7\") " pod="openshift-marketplace/redhat-operators-24xfd" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.749972 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c61312d-523c-44a8-a451-dc96bba0f6d7-utilities\") pod \"redhat-operators-24xfd\" (UID: \"0c61312d-523c-44a8-a451-dc96bba0f6d7\") " pod="openshift-marketplace/redhat-operators-24xfd" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.750049 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c61312d-523c-44a8-a451-dc96bba0f6d7-catalog-content\") pod \"redhat-operators-24xfd\" (UID: 
\"0c61312d-523c-44a8-a451-dc96bba0f6d7\") " pod="openshift-marketplace/redhat-operators-24xfd" Dec 11 14:15:30 crc kubenswrapper[4690]: E1211 14:15:30.750171 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:31.250148135 +0000 UTC m=+62.865549778 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.851626 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c61312d-523c-44a8-a451-dc96bba0f6d7-catalog-content\") pod \"redhat-operators-24xfd\" (UID: \"0c61312d-523c-44a8-a451-dc96bba0f6d7\") " pod="openshift-marketplace/redhat-operators-24xfd" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.851705 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.851752 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxxrx\" (UniqueName: \"kubernetes.io/projected/0c61312d-523c-44a8-a451-dc96bba0f6d7-kube-api-access-wxxrx\") pod \"redhat-operators-24xfd\" (UID: \"0c61312d-523c-44a8-a451-dc96bba0f6d7\") " pod="openshift-marketplace/redhat-operators-24xfd" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.851780 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c61312d-523c-44a8-a451-dc96bba0f6d7-utilities\") pod \"redhat-operators-24xfd\" (UID: \"0c61312d-523c-44a8-a451-dc96bba0f6d7\") " pod="openshift-marketplace/redhat-operators-24xfd" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.852205 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c61312d-523c-44a8-a451-dc96bba0f6d7-catalog-content\") pod \"redhat-operators-24xfd\" (UID: \"0c61312d-523c-44a8-a451-dc96bba0f6d7\") " pod="openshift-marketplace/redhat-operators-24xfd" Dec 11 14:15:30 crc kubenswrapper[4690]: E1211 14:15:30.852423 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:31.352390861 +0000 UTC m=+62.967792664 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.853852 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c61312d-523c-44a8-a451-dc96bba0f6d7-utilities\") pod \"redhat-operators-24xfd\" (UID: \"0c61312d-523c-44a8-a451-dc96bba0f6d7\") " pod="openshift-marketplace/redhat-operators-24xfd" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.856253 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-r7754"] Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.858808 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k2lrt" event={"ID":"213bdee3-5fb6-4221-819e-43ec7a01f555","Type":"ContainerStarted","Data":"1dc9f244424a65e43dd272abaa9baeda104378db3b395aa15a88ed467af2e316"} Dec 11 14:15:30 crc kubenswrapper[4690]: W1211 14:15:30.862893 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podffb685f2_c2fc_4602_8d81_5f11b6581f29.slice/crio-6e2823f11348fb19559feae969ea88b6c99bf154f1886c272b782576c545f7f6 WatchSource:0}: Error finding container 6e2823f11348fb19559feae969ea88b6c99bf154f1886c272b782576c545f7f6: Status 404 returned error can't find the container with id 6e2823f11348fb19559feae969ea88b6c99bf154f1886c272b782576c545f7f6 Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.870159 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-r56mp" event={"ID":"6ddc4574-895b-41fa-a8c9-47c4f303d0d9","Type":"ContainerStarted","Data":"2f6a33ac2c5b92954a82ce68fb6fc601c35a7aaee51fdfb2b5bfecdd1cb23eb4"} Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.877840 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"03589fde85b2693e88f8da22cc485b925ecd09e30240d9c219d6ba043152b8c6"} Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.889245 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxxrx\" (UniqueName: \"kubernetes.io/projected/0c61312d-523c-44a8-a451-dc96bba0f6d7-kube-api-access-wxxrx\") pod \"redhat-operators-24xfd\" (UID: \"0c61312d-523c-44a8-a451-dc96bba0f6d7\") " pod="openshift-marketplace/redhat-operators-24xfd" Dec 11 14:15:30 crc kubenswrapper[4690]: I1211 14:15:30.952167 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:30 crc kubenswrapper[4690]: E1211 14:15:30.953937 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:31.45391428 +0000 UTC m=+63.069315923 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.003424 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.005809 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.034883 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.036074 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.046652 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.054522 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:31 crc kubenswrapper[4690]: E1211 14:15:31.054841 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:31.554827363 +0000 UTC m=+63.170229006 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.093747 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xq68s"] Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.096054 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xq68s" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.100692 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xq68s"] Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.126510 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-24xfd" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.155868 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.156322 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a17527a0-5547-476d-96ef-beabab0cee1c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a17527a0-5547-476d-96ef-beabab0cee1c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.156422 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a17527a0-5547-476d-96ef-beabab0cee1c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a17527a0-5547-476d-96ef-beabab0cee1c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 14:15:31 crc kubenswrapper[4690]: E1211 14:15:31.157160 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:31.65713088 +0000 UTC m=+63.272532523 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.263157 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfw4x\" (UniqueName: \"kubernetes.io/projected/09e8430b-2226-4933-9dcf-ee3b5de076c3-kube-api-access-zfw4x\") pod \"redhat-operators-xq68s\" (UID: \"09e8430b-2226-4933-9dcf-ee3b5de076c3\") " pod="openshift-marketplace/redhat-operators-xq68s" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.263539 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09e8430b-2226-4933-9dcf-ee3b5de076c3-catalog-content\") pod \"redhat-operators-xq68s\" (UID: \"09e8430b-2226-4933-9dcf-ee3b5de076c3\") " pod="openshift-marketplace/redhat-operators-xq68s" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.263577 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a17527a0-5547-476d-96ef-beabab0cee1c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a17527a0-5547-476d-96ef-beabab0cee1c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.263604 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" 
(UniqueName: \"kubernetes.io/empty-dir/09e8430b-2226-4933-9dcf-ee3b5de076c3-utilities\") pod \"redhat-operators-xq68s\" (UID: \"09e8430b-2226-4933-9dcf-ee3b5de076c3\") " pod="openshift-marketplace/redhat-operators-xq68s" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.263670 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.263655 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a17527a0-5547-476d-96ef-beabab0cee1c-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a17527a0-5547-476d-96ef-beabab0cee1c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.263709 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a17527a0-5547-476d-96ef-beabab0cee1c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a17527a0-5547-476d-96ef-beabab0cee1c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 14:15:31 crc kubenswrapper[4690]: E1211 14:15:31.265102 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:31.765072941 +0000 UTC m=+63.380474584 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.309303 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a17527a0-5547-476d-96ef-beabab0cee1c-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a17527a0-5547-476d-96ef-beabab0cee1c\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.364369 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.364669 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfw4x\" (UniqueName: \"kubernetes.io/projected/09e8430b-2226-4933-9dcf-ee3b5de076c3-kube-api-access-zfw4x\") pod \"redhat-operators-xq68s\" (UID: \"09e8430b-2226-4933-9dcf-ee3b5de076c3\") " pod="openshift-marketplace/redhat-operators-xq68s" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.364709 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09e8430b-2226-4933-9dcf-ee3b5de076c3-catalog-content\") pod \"redhat-operators-xq68s\" (UID: \"09e8430b-2226-4933-9dcf-ee3b5de076c3\") " pod="openshift-marketplace/redhat-operators-xq68s" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.364736 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09e8430b-2226-4933-9dcf-ee3b5de076c3-utilities\") pod \"redhat-operators-xq68s\" (UID: \"09e8430b-2226-4933-9dcf-ee3b5de076c3\") " pod="openshift-marketplace/redhat-operators-xq68s" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.365226 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09e8430b-2226-4933-9dcf-ee3b5de076c3-utilities\") pod \"redhat-operators-xq68s\" (UID: \"09e8430b-2226-4933-9dcf-ee3b5de076c3\") " pod="openshift-marketplace/redhat-operators-xq68s" Dec 11 14:15:31 crc kubenswrapper[4690]: E1211 14:15:31.365330 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:31.865310167 +0000 UTC m=+63.480711820 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.365855 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09e8430b-2226-4933-9dcf-ee3b5de076c3-catalog-content\") pod \"redhat-operators-xq68s\" (UID: \"09e8430b-2226-4933-9dcf-ee3b5de076c3\") " pod="openshift-marketplace/redhat-operators-xq68s" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.373857 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.407250 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfw4x\" (UniqueName: \"kubernetes.io/projected/09e8430b-2226-4933-9dcf-ee3b5de076c3-kube-api-access-zfw4x\") pod \"redhat-operators-xq68s\" (UID: \"09e8430b-2226-4933-9dcf-ee3b5de076c3\") " pod="openshift-marketplace/redhat-operators-xq68s" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.465921 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:31 crc kubenswrapper[4690]: E1211 14:15:31.466313 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:31.966300732 +0000 UTC m=+63.581702375 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.497235 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-24xfd"] Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.501396 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xq68s" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.569048 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:31 crc kubenswrapper[4690]: E1211 14:15:31.569267 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:32.069233966 +0000 UTC m=+63.684635619 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.569432 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:31 crc kubenswrapper[4690]: E1211 14:15:31.570206 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:32.07019235 +0000 UTC m=+63.685593993 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.611206 4690 patch_prober.go:28] interesting pod/downloads-7954f5f757-8klv5 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.611260 4690 patch_prober.go:28] interesting pod/downloads-7954f5f757-8klv5 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.611265 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-8klv5" podUID="4ac34e73-25e1-449d-9c43-e6bc5054ede8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.611308 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-8klv5" podUID="4ac34e73-25e1-449d-9c43-e6bc5054ede8" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.665463 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.665760 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.666748 4690 patch_prober.go:28] interesting pod/console-f9d7485db-rb4j9 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.19:8443/health\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body= Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.666793 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-rb4j9" podUID="fbdc74c8-8b96-479b-b06c-637acb1bb68a" containerName="console" probeResult="failure" output="Get \"https://10.217.0.19:8443/health\": dial tcp 10.217.0.19:8443: connect: connection refused" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.671774 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:31 crc kubenswrapper[4690]: E1211 14:15:31.673032 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:32.173012511 +0000 UTC m=+63.788414154 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.728869 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.731803 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:31 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:31 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:31 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.731875 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.773415 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:31 crc kubenswrapper[4690]: E1211 14:15:31.774472 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:32.274451707 +0000 UTC m=+63.889853430 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.807067 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.818858 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xq68s"] Dec 11 14:15:31 crc kubenswrapper[4690]: W1211 14:15:31.837107 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod09e8430b_2226_4933_9dcf_ee3b5de076c3.slice/crio-6c6e5b10ab20157e83281f40f49ca885447f6de8380bdae723a926c7815273e8 WatchSource:0}: Error finding container 6c6e5b10ab20157e83281f40f49ca885447f6de8380bdae723a926c7815273e8: Status 404 returned error can't find the container with id 6c6e5b10ab20157e83281f40f49ca885447f6de8380bdae723a926c7815273e8 Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.874397 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:31 crc kubenswrapper[4690]: E1211 14:15:31.874687 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:32.374673093 +0000 UTC m=+63.990074736 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.896383 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a17527a0-5547-476d-96ef-beabab0cee1c","Type":"ContainerStarted","Data":"3e28667b0ea2a62678217b1ae99fada0c967f231c454986113281cfe93bbfeca"} Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.900768 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"44b86dc214e09764bd6376fb5c7db17cadfdb7bf0240c45f5514ab42b881d293"} Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.902168 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"6716bb005dd503bfe6e8af0373a002a2542a65b7f8d5f7e3e6822f3239b8aaf2"} Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.903745 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xq68s" event={"ID":"09e8430b-2226-4933-9dcf-ee3b5de076c3","Type":"ContainerStarted","Data":"6c6e5b10ab20157e83281f40f49ca885447f6de8380bdae723a926c7815273e8"} Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.904519 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-24xfd" event={"ID":"0c61312d-523c-44a8-a451-dc96bba0f6d7","Type":"ContainerStarted","Data":"8055ffde4bebb8658d3e65f630f5be61cfc071fd4175df8cb1589c8d290bc0e7"} Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.916527 4690 generic.go:334] "Generic (PLEG): container finished" podID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" containerID="0d89986ecb6c5c063b8d37076880275ca2bdc91f593e17ac6848ea3da47e2b45" exitCode=0 Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.916598 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r7754" event={"ID":"ffb685f2-c2fc-4602-8d81-5f11b6581f29","Type":"ContainerDied","Data":"0d89986ecb6c5c063b8d37076880275ca2bdc91f593e17ac6848ea3da47e2b45"} Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.916627 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r7754" event={"ID":"ffb685f2-c2fc-4602-8d81-5f11b6581f29","Type":"ContainerStarted","Data":"6e2823f11348fb19559feae969ea88b6c99bf154f1886c272b782576c545f7f6"} Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.917927 4690 generic.go:334] "Generic (PLEG): container finished" podID="213bdee3-5fb6-4221-819e-43ec7a01f555" containerID="963dde45b3cebc74b05ae4fa9f4e403600095dc5361ef8a089a3c567795d4ccf" exitCode=0 Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.919135 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k2lrt" 
event={"ID":"213bdee3-5fb6-4221-819e-43ec7a01f555","Type":"ContainerDied","Data":"963dde45b3cebc74b05ae4fa9f4e403600095dc5361ef8a089a3c567795d4ccf"} Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.944114 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.970770 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-r56mp" podStartSLOduration=27.970749904 podStartE2EDuration="27.970749904s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:31.97021199 +0000 UTC m=+63.585613643" watchObservedRunningTime="2025-12-11 14:15:31.970749904 +0000 UTC m=+63.586151547" Dec 11 14:15:31 crc kubenswrapper[4690]: I1211 14:15:31.977269 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:31 crc kubenswrapper[4690]: E1211 14:15:31.979386 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:32.479369642 +0000 UTC m=+64.094771285 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:31.999926 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.078593 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:32 crc kubenswrapper[4690]: E1211 14:15:32.079045 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:32.579018683 +0000 UTC m=+64.194420326 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.079142 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:32 crc kubenswrapper[4690]: E1211 14:15:32.079505 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:32.579491434 +0000 UTC m=+64.194893147 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.180170 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:32 crc kubenswrapper[4690]: E1211 14:15:32.180388 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:32.680373417 +0000 UTC m=+64.295775060 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.228011 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.228046 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.282395 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:32 crc kubenswrapper[4690]: E1211 14:15:32.282856 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:32.782839159 +0000 UTC m=+64.398240802 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.385291 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:32 crc kubenswrapper[4690]: E1211 14:15:32.385803 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:32.885771223 +0000 UTC m=+64.501172866 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.495890 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:32 crc kubenswrapper[4690]: E1211 14:15:32.496626 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:32.996581896 +0000 UTC m=+64.611983539 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.510439 4690 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 11 14:15:32 crc kubenswrapper[4690]: E1211 14:15:32.601131 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.601711 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:32 crc kubenswrapper[4690]: E1211 14:15:32.626742 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:33.126696038 +0000 UTC m=+64.742097681 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.707282 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:32 crc kubenswrapper[4690]: E1211 14:15:32.707695 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:33.207681847 +0000 UTC m=+64.823083480 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.734838 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:32 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:32 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:32 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.734896 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:32 crc kubenswrapper[4690]: E1211 14:15:32.739137 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:15:32 crc kubenswrapper[4690]: E1211 14:15:32.757349 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:15:32 crc kubenswrapper[4690]: E1211 14:15:32.757637 4690 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code 
-1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" podUID="7731421f-ecdd-4ff8-aa63-79ae9983425f" containerName="kube-multus-additional-cni-plugins" Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.778557 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.799457 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-mtc4n" Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.808614 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:32 crc kubenswrapper[4690]: E1211 14:15:32.811614 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-11 14:15:33.311593696 +0000 UTC m=+64.926995349 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.911816 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:32 crc kubenswrapper[4690]: E1211 14:15:32.912521 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-11 14:15:33.412508479 +0000 UTC m=+65.027910122 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-c5zwr" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.943008 4690 generic.go:334] "Generic (PLEG): container finished" podID="3f373953-70e7-4d9b-a3a1-cc35e7255c44" containerID="5b5cdf605a6e58ba7d018dd67aa6df394839825b40622538c165d79c28e3eda8" exitCode=0 Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.943079 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" event={"ID":"3f373953-70e7-4d9b-a3a1-cc35e7255c44","Type":"ContainerDied","Data":"5b5cdf605a6e58ba7d018dd67aa6df394839825b40622538c165d79c28e3eda8"} Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.951571 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"8773ec7f5cbca5ee502631792572ccbe1510a836f23fe9c705fe21b9145c8344"} Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.952286 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.961111 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a17527a0-5547-476d-96ef-beabab0cee1c","Type":"ContainerStarted","Data":"d52c9c93e0e27da5959999afbe3a535066d58311e585dbcd785b7ac371166b2f"} Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.969456 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"e4d3c537cb7cb41d7162e48e736598857fae0941aee8fe4219259d0128b74de9"} Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.988602 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.988574293 podStartE2EDuration="2.988574293s" podCreationTimestamp="2025-12-11 14:15:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:32.986645234 +0000 UTC m=+64.602046887" watchObservedRunningTime="2025-12-11 14:15:32.988574293 +0000 UTC m=+64.603975946" Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.989323 4690 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-11T14:15:32.510475268Z","Handler":null,"Name":""} Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.991723 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"c81c01060cc43d32942af8bee93764cf79134869b19a17b07e94a7024098066d"} Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.991818 4690 
csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.991851 4690 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 11 14:15:32 crc kubenswrapper[4690]: I1211 14:15:32.996682 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" event={"ID":"5f673e58-d96d-4194-8c5e-53be672a7996","Type":"ContainerStarted","Data":"3018a483d87f03ca5dfbe2f9cfe383eda75867c5ebda912b0845d07fde183d56"} Dec 11 14:15:33 crc kubenswrapper[4690]: I1211 14:15:33.009299 4690 generic.go:334] "Generic (PLEG): container finished" podID="09e8430b-2226-4933-9dcf-ee3b5de076c3" containerID="4ac0da69a5417465967ea7f81d4b6dd8688fc1eb132a26c3a9eff3218ddbb03d" exitCode=0 Dec 11 14:15:33 crc kubenswrapper[4690]: I1211 14:15:33.009408 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xq68s" event={"ID":"09e8430b-2226-4933-9dcf-ee3b5de076c3","Type":"ContainerDied","Data":"4ac0da69a5417465967ea7f81d4b6dd8688fc1eb132a26c3a9eff3218ddbb03d"} Dec 11 14:15:33 crc kubenswrapper[4690]: I1211 14:15:33.017191 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 11 14:15:33 crc kubenswrapper[4690]: I1211 14:15:33.022334 4690 generic.go:334] "Generic (PLEG): container finished" podID="0c61312d-523c-44a8-a451-dc96bba0f6d7" containerID="e7592b3536fe97a6b7c1e71f26da94a6920897b250944f2116074ce5f99f9db7" exitCode=0 Dec 11 14:15:33 crc kubenswrapper[4690]: I1211 14:15:33.023577 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-24xfd" event={"ID":"0c61312d-523c-44a8-a451-dc96bba0f6d7","Type":"ContainerDied","Data":"e7592b3536fe97a6b7c1e71f26da94a6920897b250944f2116074ce5f99f9db7"} Dec 11 14:15:33 crc kubenswrapper[4690]: I1211 14:15:33.029630 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 11 14:15:33 crc kubenswrapper[4690]: I1211 14:15:33.122696 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:33 crc kubenswrapper[4690]: I1211 14:15:33.127596 4690 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Dec 11 14:15:33 crc kubenswrapper[4690]: I1211 14:15:33.127634 4690 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:33 crc kubenswrapper[4690]: I1211 14:15:33.178628 4690 patch_prober.go:28] interesting pod/apiserver-76f77b778f-r56mp container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 11 14:15:33 crc kubenswrapper[4690]: [+]log ok Dec 11 14:15:33 crc kubenswrapper[4690]: [+]etcd ok Dec 11 14:15:33 crc kubenswrapper[4690]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 11 14:15:33 crc kubenswrapper[4690]: [+]poststarthook/generic-apiserver-start-informers ok Dec 11 14:15:33 crc kubenswrapper[4690]: [+]poststarthook/max-in-flight-filter ok Dec 11 14:15:33 crc kubenswrapper[4690]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 11 14:15:33 crc kubenswrapper[4690]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 11 14:15:33 crc kubenswrapper[4690]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 11 14:15:33 crc kubenswrapper[4690]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 11 14:15:33 crc kubenswrapper[4690]: [+]poststarthook/project.openshift.io-projectcache ok Dec 11 14:15:33 crc kubenswrapper[4690]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 11 14:15:33 crc kubenswrapper[4690]: [+]poststarthook/openshift.io-startinformers ok Dec 11 14:15:33 crc kubenswrapper[4690]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 11 14:15:33 crc kubenswrapper[4690]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 11 14:15:33 crc kubenswrapper[4690]: livez check failed Dec 11 14:15:33 crc kubenswrapper[4690]: I1211 14:15:33.178689 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-r56mp" podUID="6ddc4574-895b-41fa-a8c9-47c4f303d0d9" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:33 crc kubenswrapper[4690]: I1211 14:15:33.244874 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-c5zwr\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:33 crc kubenswrapper[4690]: I1211 14:15:33.416159 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 11 14:15:33 crc kubenswrapper[4690]: I1211 14:15:33.437216 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:33 crc kubenswrapper[4690]: I1211 14:15:33.733839 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:33 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:33 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:33 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:33 crc kubenswrapper[4690]: I1211 14:15:33.734210 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.091000 4690 generic.go:334] "Generic (PLEG): container finished" podID="a17527a0-5547-476d-96ef-beabab0cee1c" containerID="d52c9c93e0e27da5959999afbe3a535066d58311e585dbcd785b7ac371166b2f" exitCode=0 Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.091135 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a17527a0-5547-476d-96ef-beabab0cee1c","Type":"ContainerDied","Data":"d52c9c93e0e27da5959999afbe3a535066d58311e585dbcd785b7ac371166b2f"} Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.098087 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" event={"ID":"5f673e58-d96d-4194-8c5e-53be672a7996","Type":"ContainerStarted","Data":"9aa8899665f1f19aad19c9e9dd3cd551ab63dd76799d886eaa4c6b1994120c75"} Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.192522 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-c5zwr"] Dec 11 14:15:34 crc kubenswrapper[4690]: W1211 14:15:34.218984 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51f5a15c_718b_4daf_9ea9_9bdd4ed84f73.slice/crio-3996c2df1064bcbf630bc1def684fa83fc4a603103179d8de91a7a991c1040fe WatchSource:0}: Error finding container 3996c2df1064bcbf630bc1def684fa83fc4a603103179d8de91a7a991c1040fe: Status 404 returned error can't find the container with id 3996c2df1064bcbf630bc1def684fa83fc4a603103179d8de91a7a991c1040fe Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.493524 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.657789 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.670854 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3f373953-70e7-4d9b-a3a1-cc35e7255c44-config-volume\") pod \"3f373953-70e7-4d9b-a3a1-cc35e7255c44\" (UID: \"3f373953-70e7-4d9b-a3a1-cc35e7255c44\") " Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.670894 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b7xzc\" (UniqueName: \"kubernetes.io/projected/3f373953-70e7-4d9b-a3a1-cc35e7255c44-kube-api-access-b7xzc\") pod \"3f373953-70e7-4d9b-a3a1-cc35e7255c44\" (UID: \"3f373953-70e7-4d9b-a3a1-cc35e7255c44\") " Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.671020 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3f373953-70e7-4d9b-a3a1-cc35e7255c44-secret-volume\") pod \"3f373953-70e7-4d9b-a3a1-cc35e7255c44\" (UID: \"3f373953-70e7-4d9b-a3a1-cc35e7255c44\") " Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.673173 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3f373953-70e7-4d9b-a3a1-cc35e7255c44-config-volume" (OuterVolumeSpecName: "config-volume") pod "3f373953-70e7-4d9b-a3a1-cc35e7255c44" (UID: "3f373953-70e7-4d9b-a3a1-cc35e7255c44"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.679356 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3f373953-70e7-4d9b-a3a1-cc35e7255c44-kube-api-access-b7xzc" (OuterVolumeSpecName: "kube-api-access-b7xzc") pod "3f373953-70e7-4d9b-a3a1-cc35e7255c44" (UID: "3f373953-70e7-4d9b-a3a1-cc35e7255c44"). InnerVolumeSpecName "kube-api-access-b7xzc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.680278 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3f373953-70e7-4d9b-a3a1-cc35e7255c44-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3f373953-70e7-4d9b-a3a1-cc35e7255c44" (UID: "3f373953-70e7-4d9b-a3a1-cc35e7255c44"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.733593 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:34 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:34 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:34 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.733663 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.772002 4690 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3f373953-70e7-4d9b-a3a1-cc35e7255c44-config-volume\") on node \"crc\" DevicePath \"\"" Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.772035 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b7xzc\" (UniqueName: \"kubernetes.io/projected/3f373953-70e7-4d9b-a3a1-cc35e7255c44-kube-api-access-b7xzc\") on node \"crc\" DevicePath \"\"" Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.772047 4690 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3f373953-70e7-4d9b-a3a1-cc35e7255c44-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 11 14:15:34 crc kubenswrapper[4690]: I1211 14:15:34.990059 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-hrsvk" Dec 11 14:15:35 crc kubenswrapper[4690]: I1211 14:15:35.108393 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" event={"ID":"3f373953-70e7-4d9b-a3a1-cc35e7255c44","Type":"ContainerDied","Data":"f3634b28c4917595e64e4a96f39ddaaaccf73486c3e07884a23357eb4016815c"} Dec 11 14:15:35 crc kubenswrapper[4690]: I1211 14:15:35.108711 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3634b28c4917595e64e4a96f39ddaaaccf73486c3e07884a23357eb4016815c" Dec 11 14:15:35 crc kubenswrapper[4690]: I1211 14:15:35.108439 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29424375-vhx57" Dec 11 14:15:35 crc kubenswrapper[4690]: I1211 14:15:35.110113 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" event={"ID":"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73","Type":"ContainerStarted","Data":"3996c2df1064bcbf630bc1def684fa83fc4a603103179d8de91a7a991c1040fe"} Dec 11 14:15:35 crc kubenswrapper[4690]: I1211 14:15:35.479716 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 14:15:35 crc kubenswrapper[4690]: I1211 14:15:35.588347 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a17527a0-5547-476d-96ef-beabab0cee1c-kube-api-access\") pod \"a17527a0-5547-476d-96ef-beabab0cee1c\" (UID: \"a17527a0-5547-476d-96ef-beabab0cee1c\") " Dec 11 14:15:35 crc kubenswrapper[4690]: I1211 14:15:35.588412 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a17527a0-5547-476d-96ef-beabab0cee1c-kubelet-dir\") pod \"a17527a0-5547-476d-96ef-beabab0cee1c\" (UID: \"a17527a0-5547-476d-96ef-beabab0cee1c\") " Dec 11 14:15:35 crc kubenswrapper[4690]: I1211 14:15:35.588777 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a17527a0-5547-476d-96ef-beabab0cee1c-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a17527a0-5547-476d-96ef-beabab0cee1c" (UID: "a17527a0-5547-476d-96ef-beabab0cee1c"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 14:15:35 crc kubenswrapper[4690]: I1211 14:15:35.607256 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a17527a0-5547-476d-96ef-beabab0cee1c-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a17527a0-5547-476d-96ef-beabab0cee1c" (UID: "a17527a0-5547-476d-96ef-beabab0cee1c"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:15:35 crc kubenswrapper[4690]: I1211 14:15:35.690310 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a17527a0-5547-476d-96ef-beabab0cee1c-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 14:15:35 crc kubenswrapper[4690]: I1211 14:15:35.690346 4690 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a17527a0-5547-476d-96ef-beabab0cee1c-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 14:15:35 crc kubenswrapper[4690]: I1211 14:15:35.737989 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:35 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:35 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:35 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:35 crc kubenswrapper[4690]: I1211 14:15:35.738040 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:36 crc kubenswrapper[4690]: I1211 14:15:36.122553 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" event={"ID":"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73","Type":"ContainerStarted","Data":"5ff716de2a7abceafdc159638b1f7ba7dd0951fec6093433c8143afb3c0f8ef8"} Dec 11 14:15:36 crc kubenswrapper[4690]: I1211 14:15:36.125636 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" 
event={"ID":"a17527a0-5547-476d-96ef-beabab0cee1c","Type":"ContainerDied","Data":"3e28667b0ea2a62678217b1ae99fada0c967f231c454986113281cfe93bbfeca"} Dec 11 14:15:36 crc kubenswrapper[4690]: I1211 14:15:36.125721 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e28667b0ea2a62678217b1ae99fada0c967f231c454986113281cfe93bbfeca" Dec 11 14:15:36 crc kubenswrapper[4690]: I1211 14:15:36.125851 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 11 14:15:36 crc kubenswrapper[4690]: I1211 14:15:36.728992 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:36 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:36 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:36 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:36 crc kubenswrapper[4690]: I1211 14:15:36.729044 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:37 crc kubenswrapper[4690]: I1211 14:15:37.147366 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" event={"ID":"5f673e58-d96d-4194-8c5e-53be672a7996","Type":"ContainerStarted","Data":"8cab084c2fe8dac7eb1f48998c0ba3207940f597c19ed8372c3905a1dc9282a0"} Dec 11 14:15:37 crc kubenswrapper[4690]: I1211 14:15:37.218043 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs\") pod \"network-metrics-daemon-r8sd9\" (UID: \"0cbb05eb-6650-45bc-ae3f-d29df5940583\") " pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:37 crc kubenswrapper[4690]: I1211 14:15:37.229068 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0cbb05eb-6650-45bc-ae3f-d29df5940583-metrics-certs\") pod \"network-metrics-daemon-r8sd9\" (UID: \"0cbb05eb-6650-45bc-ae3f-d29df5940583\") " pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:37 crc kubenswrapper[4690]: I1211 14:15:37.233504 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:37 crc kubenswrapper[4690]: I1211 14:15:37.243523 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-r56mp" Dec 11 14:15:37 crc kubenswrapper[4690]: I1211 14:15:37.294878 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 11 14:15:37 crc kubenswrapper[4690]: I1211 14:15:37.310358 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-r8sd9" Dec 11 14:15:37 crc kubenswrapper[4690]: I1211 14:15:37.574810 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-t9jhb" Dec 11 14:15:37 crc kubenswrapper[4690]: I1211 14:15:37.679466 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-r8sd9"] Dec 11 14:15:37 crc kubenswrapper[4690]: W1211 14:15:37.719351 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0cbb05eb_6650_45bc_ae3f_d29df5940583.slice/crio-3fe2484b7f3a61cb6cb5fe98c15635485a450ce2a268a7e65eece76bdcc09b3d WatchSource:0}: Error finding container 3fe2484b7f3a61cb6cb5fe98c15635485a450ce2a268a7e65eece76bdcc09b3d: Status 404 returned error can't find the container with id 3fe2484b7f3a61cb6cb5fe98c15635485a450ce2a268a7e65eece76bdcc09b3d Dec 11 14:15:37 crc kubenswrapper[4690]: I1211 14:15:37.728874 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:37 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:37 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:37 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:37 crc kubenswrapper[4690]: I1211 14:15:37.728926 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.159514 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-r8sd9" event={"ID":"0cbb05eb-6650-45bc-ae3f-d29df5940583","Type":"ContainerStarted","Data":"3fe2484b7f3a61cb6cb5fe98c15635485a450ce2a268a7e65eece76bdcc09b3d"} Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.182777 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" podStartSLOduration=34.182682863 podStartE2EDuration="34.182682863s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:38.18036622 +0000 UTC m=+69.795767883" watchObservedRunningTime="2025-12-11 14:15:38.182682863 +0000 UTC m=+69.798084526" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.215541 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-ktwn7" podStartSLOduration=19.215516955 podStartE2EDuration="19.215516955s" podCreationTimestamp="2025-12-11 14:15:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:38.203620852 +0000 UTC m=+69.819022495" watchObservedRunningTime="2025-12-11 14:15:38.215516955 +0000 UTC m=+69.830918598" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.244483 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 11 14:15:38 crc kubenswrapper[4690]: E1211 14:15:38.244797 4690 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="a17527a0-5547-476d-96ef-beabab0cee1c" containerName="pruner" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.244813 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="a17527a0-5547-476d-96ef-beabab0cee1c" containerName="pruner" Dec 11 14:15:38 crc kubenswrapper[4690]: E1211 14:15:38.244835 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3f373953-70e7-4d9b-a3a1-cc35e7255c44" containerName="collect-profiles" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.244843 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="3f373953-70e7-4d9b-a3a1-cc35e7255c44" containerName="collect-profiles" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.244982 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="3f373953-70e7-4d9b-a3a1-cc35e7255c44" containerName="collect-profiles" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.245291 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="a17527a0-5547-476d-96ef-beabab0cee1c" containerName="pruner" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.247036 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.254227 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.256224 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.259793 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.351338 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/01bcf2e0-005e-4e70-afa7-ad2da2521a5e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"01bcf2e0-005e-4e70-afa7-ad2da2521a5e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.351407 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/01bcf2e0-005e-4e70-afa7-ad2da2521a5e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"01bcf2e0-005e-4e70-afa7-ad2da2521a5e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.453340 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/01bcf2e0-005e-4e70-afa7-ad2da2521a5e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"01bcf2e0-005e-4e70-afa7-ad2da2521a5e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.453464 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/01bcf2e0-005e-4e70-afa7-ad2da2521a5e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"01bcf2e0-005e-4e70-afa7-ad2da2521a5e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.453470 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/01bcf2e0-005e-4e70-afa7-ad2da2521a5e-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"01bcf2e0-005e-4e70-afa7-ad2da2521a5e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.483314 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/01bcf2e0-005e-4e70-afa7-ad2da2521a5e-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"01bcf2e0-005e-4e70-afa7-ad2da2521a5e\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.594751 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.736053 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:38 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:38 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:38 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.736128 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:38 crc kubenswrapper[4690]: I1211 14:15:38.932220 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 11 14:15:39 crc kubenswrapper[4690]: I1211 14:15:39.176486 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"01bcf2e0-005e-4e70-afa7-ad2da2521a5e","Type":"ContainerStarted","Data":"b33f2db1850e7b24ace07f5877394fec0e6a0d410d9e4163205b0fb5e2cb9e21"} Dec 11 14:15:39 crc kubenswrapper[4690]: I1211 14:15:39.729066 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:39 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:39 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:39 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:39 crc kubenswrapper[4690]: I1211 14:15:39.729315 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:40 crc kubenswrapper[4690]: I1211 14:15:40.196839 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"01bcf2e0-005e-4e70-afa7-ad2da2521a5e","Type":"ContainerStarted","Data":"cb19552e7d2b3073a826d73f0d369c47a2d84c893ada2de438810df58767365c"} Dec 11 14:15:40 crc kubenswrapper[4690]: I1211 14:15:40.199232 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-r8sd9" 
event={"ID":"0cbb05eb-6650-45bc-ae3f-d29df5940583","Type":"ContainerStarted","Data":"9d23a62a13b384b990e9c3bb4a0d43523bfc27b3199522feeac0cd9904a6d3f2"} Dec 11 14:15:40 crc kubenswrapper[4690]: I1211 14:15:40.213865 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.213845415 podStartE2EDuration="2.213845415s" podCreationTimestamp="2025-12-11 14:15:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:40.210591777 +0000 UTC m=+71.825993440" watchObservedRunningTime="2025-12-11 14:15:40.213845415 +0000 UTC m=+71.829247058" Dec 11 14:15:40 crc kubenswrapper[4690]: I1211 14:15:40.728823 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:40 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:40 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:40 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:40 crc kubenswrapper[4690]: I1211 14:15:40.729128 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:41 crc kubenswrapper[4690]: I1211 14:15:41.212745 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-r8sd9" event={"ID":"0cbb05eb-6650-45bc-ae3f-d29df5940583","Type":"ContainerStarted","Data":"afa66a84e82393590937391e45248615aea6b020fed607958f054bd3c95267d1"} Dec 11 14:15:41 crc kubenswrapper[4690]: I1211 14:15:41.215551 4690 generic.go:334] "Generic (PLEG): container finished" podID="01bcf2e0-005e-4e70-afa7-ad2da2521a5e" containerID="cb19552e7d2b3073a826d73f0d369c47a2d84c893ada2de438810df58767365c" exitCode=0 Dec 11 14:15:41 crc kubenswrapper[4690]: I1211 14:15:41.215609 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"01bcf2e0-005e-4e70-afa7-ad2da2521a5e","Type":"ContainerDied","Data":"cb19552e7d2b3073a826d73f0d369c47a2d84c893ada2de438810df58767365c"} Dec 11 14:15:41 crc kubenswrapper[4690]: I1211 14:15:41.248560 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-r8sd9" podStartSLOduration=37.24851429 podStartE2EDuration="37.24851429s" podCreationTimestamp="2025-12-11 14:15:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:15:41.23451207 +0000 UTC m=+72.849913733" watchObservedRunningTime="2025-12-11 14:15:41.24851429 +0000 UTC m=+72.863915943" Dec 11 14:15:41 crc kubenswrapper[4690]: I1211 14:15:41.624096 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-8klv5" Dec 11 14:15:41 crc kubenswrapper[4690]: I1211 14:15:41.662970 4690 patch_prober.go:28] interesting pod/console-f9d7485db-rb4j9 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.19:8443/health\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body= Dec 11 14:15:41 
crc kubenswrapper[4690]: I1211 14:15:41.663023 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-rb4j9" podUID="fbdc74c8-8b96-479b-b06c-637acb1bb68a" containerName="console" probeResult="failure" output="Get \"https://10.217.0.19:8443/health\": dial tcp 10.217.0.19:8443: connect: connection refused" Dec 11 14:15:41 crc kubenswrapper[4690]: I1211 14:15:41.728875 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:41 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:41 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:41 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:41 crc kubenswrapper[4690]: I1211 14:15:41.728967 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:42 crc kubenswrapper[4690]: I1211 14:15:42.456038 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 14:15:42 crc kubenswrapper[4690]: I1211 14:15:42.525035 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/01bcf2e0-005e-4e70-afa7-ad2da2521a5e-kube-api-access\") pod \"01bcf2e0-005e-4e70-afa7-ad2da2521a5e\" (UID: \"01bcf2e0-005e-4e70-afa7-ad2da2521a5e\") " Dec 11 14:15:42 crc kubenswrapper[4690]: I1211 14:15:42.525099 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/01bcf2e0-005e-4e70-afa7-ad2da2521a5e-kubelet-dir\") pod \"01bcf2e0-005e-4e70-afa7-ad2da2521a5e\" (UID: \"01bcf2e0-005e-4e70-afa7-ad2da2521a5e\") " Dec 11 14:15:42 crc kubenswrapper[4690]: I1211 14:15:42.525252 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/01bcf2e0-005e-4e70-afa7-ad2da2521a5e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "01bcf2e0-005e-4e70-afa7-ad2da2521a5e" (UID: "01bcf2e0-005e-4e70-afa7-ad2da2521a5e"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 14:15:42 crc kubenswrapper[4690]: I1211 14:15:42.525465 4690 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/01bcf2e0-005e-4e70-afa7-ad2da2521a5e-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 14:15:42 crc kubenswrapper[4690]: I1211 14:15:42.532222 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01bcf2e0-005e-4e70-afa7-ad2da2521a5e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "01bcf2e0-005e-4e70-afa7-ad2da2521a5e" (UID: "01bcf2e0-005e-4e70-afa7-ad2da2521a5e"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:15:42 crc kubenswrapper[4690]: E1211 14:15:42.575830 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:15:42 crc kubenswrapper[4690]: E1211 14:15:42.579068 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:15:42 crc kubenswrapper[4690]: E1211 14:15:42.581388 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:15:42 crc kubenswrapper[4690]: E1211 14:15:42.581440 4690 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" podUID="7731421f-ecdd-4ff8-aa63-79ae9983425f" containerName="kube-multus-additional-cni-plugins" Dec 11 14:15:42 crc kubenswrapper[4690]: I1211 14:15:42.628236 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/01bcf2e0-005e-4e70-afa7-ad2da2521a5e-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 14:15:42 crc kubenswrapper[4690]: I1211 14:15:42.729307 4690 patch_prober.go:28] interesting pod/router-default-5444994796-jzm89 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 11 14:15:42 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Dec 11 14:15:42 crc kubenswrapper[4690]: [+]process-running ok Dec 11 14:15:42 crc kubenswrapper[4690]: healthz check failed Dec 11 14:15:42 crc kubenswrapper[4690]: I1211 14:15:42.729376 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-jzm89" podUID="9211752e-9c0b-43ea-9c4d-5d91fcb472db" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 11 14:15:43 crc kubenswrapper[4690]: I1211 14:15:43.229535 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"01bcf2e0-005e-4e70-afa7-ad2da2521a5e","Type":"ContainerDied","Data":"b33f2db1850e7b24ace07f5877394fec0e6a0d410d9e4163205b0fb5e2cb9e21"} Dec 11 14:15:43 crc kubenswrapper[4690]: I1211 14:15:43.229688 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 11 14:15:43 crc kubenswrapper[4690]: I1211 14:15:43.229585 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b33f2db1850e7b24ace07f5877394fec0e6a0d410d9e4163205b0fb5e2cb9e21" Dec 11 14:15:43 crc kubenswrapper[4690]: I1211 14:15:43.440372 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:15:43 crc kubenswrapper[4690]: I1211 14:15:43.734541 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:43 crc kubenswrapper[4690]: I1211 14:15:43.742786 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-jzm89" Dec 11 14:15:51 crc kubenswrapper[4690]: I1211 14:15:51.680998 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:51 crc kubenswrapper[4690]: I1211 14:15:51.687926 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-rb4j9" Dec 11 14:15:52 crc kubenswrapper[4690]: E1211 14:15:52.575011 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:15:52 crc kubenswrapper[4690]: E1211 14:15:52.577019 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:15:52 crc kubenswrapper[4690]: E1211 14:15:52.578365 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:15:52 crc kubenswrapper[4690]: E1211 14:15:52.578431 4690 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" podUID="7731421f-ecdd-4ff8-aa63-79ae9983425f" containerName="kube-multus-additional-cni-plugins" Dec 11 14:15:53 crc kubenswrapper[4690]: I1211 14:15:53.445983 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:16:01 crc kubenswrapper[4690]: I1211 14:16:01.326922 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-vl4d4_7731421f-ecdd-4ff8-aa63-79ae9983425f/kube-multus-additional-cni-plugins/0.log" Dec 11 14:16:01 crc kubenswrapper[4690]: I1211 14:16:01.327399 4690 generic.go:334] "Generic (PLEG): container finished" podID="7731421f-ecdd-4ff8-aa63-79ae9983425f" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" exitCode=137 Dec 11 14:16:01 crc 
kubenswrapper[4690]: I1211 14:16:01.327425 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" event={"ID":"7731421f-ecdd-4ff8-aa63-79ae9983425f","Type":"ContainerDied","Data":"dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c"} Dec 11 14:16:02 crc kubenswrapper[4690]: I1211 14:16:02.454239 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-sgjk8" Dec 11 14:16:02 crc kubenswrapper[4690]: E1211 14:16:02.572540 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:02 crc kubenswrapper[4690]: E1211 14:16:02.573046 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:02 crc kubenswrapper[4690]: E1211 14:16:02.573464 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:02 crc kubenswrapper[4690]: E1211 14:16:02.573490 4690 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" podUID="7731421f-ecdd-4ff8-aa63-79ae9983425f" containerName="kube-multus-additional-cni-plugins" Dec 11 14:16:03 crc kubenswrapper[4690]: I1211 14:16:03.649378 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 11 14:16:08 crc kubenswrapper[4690]: I1211 14:16:08.647577 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=5.647561427 podStartE2EDuration="5.647561427s" podCreationTimestamp="2025-12-11 14:16:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:16:08.644785341 +0000 UTC m=+100.260187004" watchObservedRunningTime="2025-12-11 14:16:08.647561427 +0000 UTC m=+100.262963070" Dec 11 14:16:09 crc kubenswrapper[4690]: I1211 14:16:09.441999 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 11 14:16:09 crc kubenswrapper[4690]: E1211 14:16:09.442449 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01bcf2e0-005e-4e70-afa7-ad2da2521a5e" containerName="pruner" Dec 11 14:16:09 crc kubenswrapper[4690]: I1211 
14:16:09.442466 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="01bcf2e0-005e-4e70-afa7-ad2da2521a5e" containerName="pruner" Dec 11 14:16:09 crc kubenswrapper[4690]: I1211 14:16:09.442715 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="01bcf2e0-005e-4e70-afa7-ad2da2521a5e" containerName="pruner" Dec 11 14:16:09 crc kubenswrapper[4690]: I1211 14:16:09.443397 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 14:16:09 crc kubenswrapper[4690]: I1211 14:16:09.454226 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 11 14:16:09 crc kubenswrapper[4690]: I1211 14:16:09.454625 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 11 14:16:09 crc kubenswrapper[4690]: I1211 14:16:09.457224 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 11 14:16:09 crc kubenswrapper[4690]: I1211 14:16:09.596090 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0e36d4d2-cf14-40b1-897b-2704c32e12c4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0e36d4d2-cf14-40b1-897b-2704c32e12c4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 14:16:09 crc kubenswrapper[4690]: I1211 14:16:09.596225 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0e36d4d2-cf14-40b1-897b-2704c32e12c4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0e36d4d2-cf14-40b1-897b-2704c32e12c4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 14:16:09 crc kubenswrapper[4690]: I1211 14:16:09.698975 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0e36d4d2-cf14-40b1-897b-2704c32e12c4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0e36d4d2-cf14-40b1-897b-2704c32e12c4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 14:16:09 crc kubenswrapper[4690]: I1211 14:16:09.699076 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0e36d4d2-cf14-40b1-897b-2704c32e12c4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0e36d4d2-cf14-40b1-897b-2704c32e12c4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 14:16:09 crc kubenswrapper[4690]: I1211 14:16:09.699091 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0e36d4d2-cf14-40b1-897b-2704c32e12c4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0e36d4d2-cf14-40b1-897b-2704c32e12c4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 14:16:09 crc kubenswrapper[4690]: I1211 14:16:09.723488 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0e36d4d2-cf14-40b1-897b-2704c32e12c4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0e36d4d2-cf14-40b1-897b-2704c32e12c4\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 14:16:09 crc kubenswrapper[4690]: I1211 14:16:09.780544 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 14:16:11 crc kubenswrapper[4690]: I1211 14:16:11.586455 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 11 14:16:12 crc kubenswrapper[4690]: E1211 14:16:12.571635 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:12 crc kubenswrapper[4690]: E1211 14:16:12.571922 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:12 crc kubenswrapper[4690]: E1211 14:16:12.572189 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:12 crc kubenswrapper[4690]: E1211 14:16:12.572241 4690 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" podUID="7731421f-ecdd-4ff8-aa63-79ae9983425f" containerName="kube-multus-additional-cni-plugins" Dec 11 14:16:13 crc kubenswrapper[4690]: I1211 14:16:13.839761 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 11 14:16:13 crc kubenswrapper[4690]: I1211 14:16:13.841112 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 14:16:13 crc kubenswrapper[4690]: I1211 14:16:13.850937 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 11 14:16:13 crc kubenswrapper[4690]: I1211 14:16:13.857361 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/64df140d-3126-42f9-b4d2-a3488a27fb57-var-lock\") pod \"installer-9-crc\" (UID: \"64df140d-3126-42f9-b4d2-a3488a27fb57\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 14:16:13 crc kubenswrapper[4690]: I1211 14:16:13.857405 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64df140d-3126-42f9-b4d2-a3488a27fb57-kube-api-access\") pod \"installer-9-crc\" (UID: \"64df140d-3126-42f9-b4d2-a3488a27fb57\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 14:16:13 crc kubenswrapper[4690]: I1211 14:16:13.857427 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/64df140d-3126-42f9-b4d2-a3488a27fb57-kubelet-dir\") pod \"installer-9-crc\" (UID: \"64df140d-3126-42f9-b4d2-a3488a27fb57\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 14:16:13 crc kubenswrapper[4690]: I1211 14:16:13.958494 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/64df140d-3126-42f9-b4d2-a3488a27fb57-var-lock\") pod \"installer-9-crc\" (UID: \"64df140d-3126-42f9-b4d2-a3488a27fb57\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 14:16:13 crc kubenswrapper[4690]: I1211 14:16:13.958543 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64df140d-3126-42f9-b4d2-a3488a27fb57-kube-api-access\") pod \"installer-9-crc\" (UID: \"64df140d-3126-42f9-b4d2-a3488a27fb57\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 14:16:13 crc kubenswrapper[4690]: I1211 14:16:13.958564 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/64df140d-3126-42f9-b4d2-a3488a27fb57-kubelet-dir\") pod \"installer-9-crc\" (UID: \"64df140d-3126-42f9-b4d2-a3488a27fb57\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 14:16:13 crc kubenswrapper[4690]: I1211 14:16:13.958682 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/64df140d-3126-42f9-b4d2-a3488a27fb57-kubelet-dir\") pod \"installer-9-crc\" (UID: \"64df140d-3126-42f9-b4d2-a3488a27fb57\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 14:16:13 crc kubenswrapper[4690]: I1211 14:16:13.958721 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/64df140d-3126-42f9-b4d2-a3488a27fb57-var-lock\") pod \"installer-9-crc\" (UID: \"64df140d-3126-42f9-b4d2-a3488a27fb57\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 14:16:13 crc kubenswrapper[4690]: I1211 14:16:13.975529 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64df140d-3126-42f9-b4d2-a3488a27fb57-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"64df140d-3126-42f9-b4d2-a3488a27fb57\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 11 14:16:14 crc kubenswrapper[4690]: I1211 14:16:14.163004 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 14:16:22 crc kubenswrapper[4690]: E1211 14:16:22.571766 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:22 crc kubenswrapper[4690]: E1211 14:16:22.574016 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:22 crc kubenswrapper[4690]: E1211 14:16:22.574518 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:22 crc kubenswrapper[4690]: E1211 14:16:22.574582 4690 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" podUID="7731421f-ecdd-4ff8-aa63-79ae9983425f" containerName="kube-multus-additional-cni-plugins" Dec 11 14:16:32 crc kubenswrapper[4690]: E1211 14:16:32.572117 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:32 crc kubenswrapper[4690]: E1211 14:16:32.572991 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:32 crc kubenswrapper[4690]: E1211 14:16:32.573255 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:32 crc kubenswrapper[4690]: E1211 14:16:32.573286 4690 
prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" podUID="7731421f-ecdd-4ff8-aa63-79ae9983425f" containerName="kube-multus-additional-cni-plugins" Dec 11 14:16:42 crc kubenswrapper[4690]: E1211 14:16:42.572109 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:42 crc kubenswrapper[4690]: E1211 14:16:42.573055 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:42 crc kubenswrapper[4690]: E1211 14:16:42.573475 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:42 crc kubenswrapper[4690]: E1211 14:16:42.573543 4690 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" podUID="7731421f-ecdd-4ff8-aa63-79ae9983425f" containerName="kube-multus-additional-cni-plugins" Dec 11 14:16:52 crc kubenswrapper[4690]: E1211 14:16:52.572420 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:52 crc kubenswrapper[4690]: E1211 14:16:52.576081 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:52 crc kubenswrapper[4690]: E1211 14:16:52.576789 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" 
containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" cmd=["/bin/bash","-c","test -f /ready/ready"] Dec 11 14:16:52 crc kubenswrapper[4690]: E1211 14:16:52.576881 4690 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c is running failed: container process not found" probeType="Readiness" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" podUID="7731421f-ecdd-4ff8-aa63-79ae9983425f" containerName="kube-multus-additional-cni-plugins" Dec 11 14:16:53 crc kubenswrapper[4690]: I1211 14:16:53.992036 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-vl4d4_7731421f-ecdd-4ff8-aa63-79ae9983425f/kube-multus-additional-cni-plugins/0.log" Dec 11 14:16:53 crc kubenswrapper[4690]: I1211 14:16:53.992107 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.181796 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/7731421f-ecdd-4ff8-aa63-79ae9983425f-ready\") pod \"7731421f-ecdd-4ff8-aa63-79ae9983425f\" (UID: \"7731421f-ecdd-4ff8-aa63-79ae9983425f\") " Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.182030 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7731421f-ecdd-4ff8-aa63-79ae9983425f-tuning-conf-dir\") pod \"7731421f-ecdd-4ff8-aa63-79ae9983425f\" (UID: \"7731421f-ecdd-4ff8-aa63-79ae9983425f\") " Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.182072 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86vrb\" (UniqueName: \"kubernetes.io/projected/7731421f-ecdd-4ff8-aa63-79ae9983425f-kube-api-access-86vrb\") pod \"7731421f-ecdd-4ff8-aa63-79ae9983425f\" (UID: \"7731421f-ecdd-4ff8-aa63-79ae9983425f\") " Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.182107 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7731421f-ecdd-4ff8-aa63-79ae9983425f-cni-sysctl-allowlist\") pod \"7731421f-ecdd-4ff8-aa63-79ae9983425f\" (UID: \"7731421f-ecdd-4ff8-aa63-79ae9983425f\") " Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.182204 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7731421f-ecdd-4ff8-aa63-79ae9983425f-tuning-conf-dir" (OuterVolumeSpecName: "tuning-conf-dir") pod "7731421f-ecdd-4ff8-aa63-79ae9983425f" (UID: "7731421f-ecdd-4ff8-aa63-79ae9983425f"). InnerVolumeSpecName "tuning-conf-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.182536 4690 reconciler_common.go:293] "Volume detached for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/7731421f-ecdd-4ff8-aa63-79ae9983425f-tuning-conf-dir\") on node \"crc\" DevicePath \"\"" Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.182786 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7731421f-ecdd-4ff8-aa63-79ae9983425f-ready" (OuterVolumeSpecName: "ready") pod "7731421f-ecdd-4ff8-aa63-79ae9983425f" (UID: "7731421f-ecdd-4ff8-aa63-79ae9983425f"). InnerVolumeSpecName "ready". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.183442 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7731421f-ecdd-4ff8-aa63-79ae9983425f-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7731421f-ecdd-4ff8-aa63-79ae9983425f" (UID: "7731421f-ecdd-4ff8-aa63-79ae9983425f"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.190706 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7731421f-ecdd-4ff8-aa63-79ae9983425f-kube-api-access-86vrb" (OuterVolumeSpecName: "kube-api-access-86vrb") pod "7731421f-ecdd-4ff8-aa63-79ae9983425f" (UID: "7731421f-ecdd-4ff8-aa63-79ae9983425f"). InnerVolumeSpecName "kube-api-access-86vrb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.283424 4690 reconciler_common.go:293] "Volume detached for volume \"ready\" (UniqueName: \"kubernetes.io/empty-dir/7731421f-ecdd-4ff8-aa63-79ae9983425f-ready\") on node \"crc\" DevicePath \"\"" Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.283471 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86vrb\" (UniqueName: \"kubernetes.io/projected/7731421f-ecdd-4ff8-aa63-79ae9983425f-kube-api-access-86vrb\") on node \"crc\" DevicePath \"\"" Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.283482 4690 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7731421f-ecdd-4ff8-aa63-79ae9983425f-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.603182 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_cni-sysctl-allowlist-ds-vl4d4_7731421f-ecdd-4ff8-aa63-79ae9983425f/kube-multus-additional-cni-plugins/0.log" Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.603265 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" event={"ID":"7731421f-ecdd-4ff8-aa63-79ae9983425f","Type":"ContainerDied","Data":"2a89d8f5c632a1757547335790301c5df242adb30d33d9da36742a86e78b3af6"} Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.603314 4690 scope.go:117] "RemoveContainer" containerID="dfabfa4fe6d94da3891226598055bbe69bdecb0e7717e5e56efc3daf199d207c" Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.603331 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/cni-sysctl-allowlist-ds-vl4d4" Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.628813 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-vl4d4"] Dec 11 14:16:54 crc kubenswrapper[4690]: I1211 14:16:54.636997 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-multus/cni-sysctl-allowlist-ds-vl4d4"] Dec 11 14:16:56 crc kubenswrapper[4690]: I1211 14:16:56.638127 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7731421f-ecdd-4ff8-aa63-79ae9983425f" path="/var/lib/kubelet/pods/7731421f-ecdd-4ff8-aa63-79ae9983425f/volumes" Dec 11 14:17:04 crc kubenswrapper[4690]: E1211 14:17:04.463661 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 11 14:17:04 crc kubenswrapper[4690]: E1211 14:17:04.464700 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-knccb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-fs97w_openshift-marketplace(8cd40416-92d2-41ec-b6ae-ba668ccc5685): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 14:17:04 crc kubenswrapper[4690]: E1211 14:17:04.465850 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-fs97w" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" Dec 11 14:17:05 crc kubenswrapper[4690]: E1211 14:17:05.079028 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-fs97w" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" Dec 11 14:17:05 crc kubenswrapper[4690]: I1211 14:17:05.523494 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 11 14:17:05 crc kubenswrapper[4690]: E1211 14:17:05.646102 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 11 14:17:05 crc kubenswrapper[4690]: E1211 14:17:05.646271 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v57hq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-8bfnc_openshift-marketplace(646a2a29-480a-4725-9407-80d8a4f2a4bb): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 14:17:05 crc kubenswrapper[4690]: E1211 14:17:05.647529 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-8bfnc" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" Dec 11 14:17:34 crc kubenswrapper[4690]: I1211 14:17:34.627385 4690 patch_prober.go:28] interesting pod/machine-config-daemon-z9662 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 14:17:34 crc kubenswrapper[4690]: I1211 14:17:34.630140 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z9662" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" 
containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 14:17:40 crc kubenswrapper[4690]: E1211 14:17:40.451788 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 11 14:17:40 crc kubenswrapper[4690]: E1211 14:17:40.452379 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zfw4x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-xq68s_openshift-marketplace(09e8430b-2226-4933-9dcf-ee3b5de076c3): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 14:17:40 crc kubenswrapper[4690]: E1211 14:17:40.453567 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-xq68s" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" Dec 11 14:17:43 crc kubenswrapper[4690]: E1211 14:17:43.146920 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-xq68s" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" Dec 11 14:17:43 crc kubenswrapper[4690]: E1211 14:17:43.230602 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 11 14:17:43 crc kubenswrapper[4690]: E1211 14:17:43.231774 4690 kuberuntime_manager.go:1274] 
"Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wxxrx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-24xfd_openshift-marketplace(0c61312d-523c-44a8-a451-dc96bba0f6d7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 14:17:43 crc kubenswrapper[4690]: E1211 14:17:43.233099 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-24xfd" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" Dec 11 14:17:43 crc kubenswrapper[4690]: E1211 14:17:43.251715 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 11 14:17:43 crc kubenswrapper[4690]: E1211 14:17:43.251879 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j96wl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-kmjlt_openshift-marketplace(907b18d4-d2b5-47c9-9c70-716bd64330ae): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 14:17:43 crc kubenswrapper[4690]: E1211 14:17:43.252991 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-kmjlt" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" Dec 11 14:17:43 crc kubenswrapper[4690]: E1211 14:17:43.291544 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 11 14:17:43 crc kubenswrapper[4690]: E1211 14:17:43.291729 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-klnm2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-djmxn_openshift-marketplace(9fca9021-9a9f-4d5d-9892-37f39c580323): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 14:17:43 crc kubenswrapper[4690]: E1211 14:17:43.292988 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-djmxn" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" Dec 11 14:17:43 crc kubenswrapper[4690]: I1211 14:17:43.845449 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0e36d4d2-cf14-40b1-897b-2704c32e12c4","Type":"ContainerStarted","Data":"e90c0466382755dfc7486486766c148aeb90f16b0ec5b4cc3f6bf5da29de7c4c"} Dec 11 14:17:44 crc kubenswrapper[4690]: E1211 14:17:44.305719 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-djmxn" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" Dec 11 14:17:44 crc kubenswrapper[4690]: E1211 14:17:44.305797 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-kmjlt" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" Dec 11 14:17:44 crc kubenswrapper[4690]: E1211 14:17:44.307474 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-24xfd" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" Dec 11 14:17:44 crc kubenswrapper[4690]: E1211 14:17:44.381119 4690 log.go:32] 
"PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 11 14:17:44 crc kubenswrapper[4690]: E1211 14:17:44.381530 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8tc9w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-r7754_openshift-marketplace(ffb685f2-c2fc-4602-8d81-5f11b6581f29): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 14:17:44 crc kubenswrapper[4690]: E1211 14:17:44.383706 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-r7754" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" Dec 11 14:17:44 crc kubenswrapper[4690]: E1211 14:17:44.478006 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 11 14:17:44 crc kubenswrapper[4690]: E1211 14:17:44.478220 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pwgxp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-k2lrt_openshift-marketplace(213bdee3-5fb6-4221-819e-43ec7a01f555): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 11 14:17:44 crc kubenswrapper[4690]: E1211 14:17:44.479341 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-k2lrt" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" Dec 11 14:17:44 crc kubenswrapper[4690]: W1211 14:17:44.817435 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod64df140d_3126_42f9_b4d2_a3488a27fb57.slice/crio-c6cf04c44fe0bebecb993ae42dff10f6a7abeed6146f73d64cdb5f3e25b29f53 WatchSource:0}: Error finding container c6cf04c44fe0bebecb993ae42dff10f6a7abeed6146f73d64cdb5f3e25b29f53: Status 404 returned error can't find the container with id c6cf04c44fe0bebecb993ae42dff10f6a7abeed6146f73d64cdb5f3e25b29f53 Dec 11 14:17:44 crc kubenswrapper[4690]: I1211 14:17:44.817942 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 11 14:17:44 crc kubenswrapper[4690]: I1211 14:17:44.851883 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"64df140d-3126-42f9-b4d2-a3488a27fb57","Type":"ContainerStarted","Data":"c6cf04c44fe0bebecb993ae42dff10f6a7abeed6146f73d64cdb5f3e25b29f53"} Dec 11 14:17:44 crc kubenswrapper[4690]: E1211 14:17:44.853470 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-r7754" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" Dec 11 14:17:44 crc kubenswrapper[4690]: E1211 14:17:44.853525 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: 
\"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-k2lrt" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" Dec 11 14:17:45 crc kubenswrapper[4690]: I1211 14:17:45.862020 4690 generic.go:334] "Generic (PLEG): container finished" podID="646a2a29-480a-4725-9407-80d8a4f2a4bb" containerID="ba6527c8cd588d8d6d94596f287bcb155c65b626007d0dde03a7f497db1bffc6" exitCode=0 Dec 11 14:17:45 crc kubenswrapper[4690]: I1211 14:17:45.862278 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bfnc" event={"ID":"646a2a29-480a-4725-9407-80d8a4f2a4bb","Type":"ContainerDied","Data":"ba6527c8cd588d8d6d94596f287bcb155c65b626007d0dde03a7f497db1bffc6"} Dec 11 14:17:45 crc kubenswrapper[4690]: I1211 14:17:45.865558 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"64df140d-3126-42f9-b4d2-a3488a27fb57","Type":"ContainerStarted","Data":"22af00ca16bc55b53d6dc08326b573905c5ccf753f5dcae674503f0d098e100e"} Dec 11 14:17:45 crc kubenswrapper[4690]: I1211 14:17:45.869120 4690 generic.go:334] "Generic (PLEG): container finished" podID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" containerID="4f017cb839d1c26b09b8a30b89ce3fe0695de7014356a1205e85377e27fb6986" exitCode=0 Dec 11 14:17:45 crc kubenswrapper[4690]: I1211 14:17:45.869194 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fs97w" event={"ID":"8cd40416-92d2-41ec-b6ae-ba668ccc5685","Type":"ContainerDied","Data":"4f017cb839d1c26b09b8a30b89ce3fe0695de7014356a1205e85377e27fb6986"} Dec 11 14:17:45 crc kubenswrapper[4690]: I1211 14:17:45.871381 4690 generic.go:334] "Generic (PLEG): container finished" podID="0e36d4d2-cf14-40b1-897b-2704c32e12c4" containerID="26581cb7d1d0f1c8712e0fd0a260b8f439df87c226e5d5e22766bf8cbb6c6809" exitCode=0 Dec 11 14:17:45 crc kubenswrapper[4690]: I1211 14:17:45.871410 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0e36d4d2-cf14-40b1-897b-2704c32e12c4","Type":"ContainerDied","Data":"26581cb7d1d0f1c8712e0fd0a260b8f439df87c226e5d5e22766bf8cbb6c6809"} Dec 11 14:17:45 crc kubenswrapper[4690]: I1211 14:17:45.897273 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=92.897253916 podStartE2EDuration="1m32.897253916s" podCreationTimestamp="2025-12-11 14:16:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:17:45.894372682 +0000 UTC m=+197.509774335" watchObservedRunningTime="2025-12-11 14:17:45.897253916 +0000 UTC m=+197.512655559" Dec 11 14:17:47 crc kubenswrapper[4690]: I1211 14:17:47.102377 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 14:17:47 crc kubenswrapper[4690]: I1211 14:17:47.150838 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0e36d4d2-cf14-40b1-897b-2704c32e12c4-kubelet-dir\") pod \"0e36d4d2-cf14-40b1-897b-2704c32e12c4\" (UID: \"0e36d4d2-cf14-40b1-897b-2704c32e12c4\") " Dec 11 14:17:47 crc kubenswrapper[4690]: I1211 14:17:47.150998 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0e36d4d2-cf14-40b1-897b-2704c32e12c4-kube-api-access\") pod \"0e36d4d2-cf14-40b1-897b-2704c32e12c4\" (UID: \"0e36d4d2-cf14-40b1-897b-2704c32e12c4\") " Dec 11 14:17:47 crc kubenswrapper[4690]: I1211 14:17:47.152453 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0e36d4d2-cf14-40b1-897b-2704c32e12c4-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "0e36d4d2-cf14-40b1-897b-2704c32e12c4" (UID: "0e36d4d2-cf14-40b1-897b-2704c32e12c4"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 14:17:47 crc kubenswrapper[4690]: I1211 14:17:47.158823 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e36d4d2-cf14-40b1-897b-2704c32e12c4-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0e36d4d2-cf14-40b1-897b-2704c32e12c4" (UID: "0e36d4d2-cf14-40b1-897b-2704c32e12c4"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:17:47 crc kubenswrapper[4690]: I1211 14:17:47.252153 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0e36d4d2-cf14-40b1-897b-2704c32e12c4-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 14:17:47 crc kubenswrapper[4690]: I1211 14:17:47.252178 4690 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0e36d4d2-cf14-40b1-897b-2704c32e12c4-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 14:17:47 crc kubenswrapper[4690]: I1211 14:17:47.885753 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0e36d4d2-cf14-40b1-897b-2704c32e12c4","Type":"ContainerDied","Data":"e90c0466382755dfc7486486766c148aeb90f16b0ec5b4cc3f6bf5da29de7c4c"} Dec 11 14:17:47 crc kubenswrapper[4690]: I1211 14:17:47.885794 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e90c0466382755dfc7486486766c148aeb90f16b0ec5b4cc3f6bf5da29de7c4c" Dec 11 14:17:47 crc kubenswrapper[4690]: I1211 14:17:47.885850 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 11 14:17:47 crc kubenswrapper[4690]: I1211 14:17:47.889160 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bfnc" event={"ID":"646a2a29-480a-4725-9407-80d8a4f2a4bb","Type":"ContainerStarted","Data":"cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503"} Dec 11 14:17:47 crc kubenswrapper[4690]: I1211 14:17:47.891682 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fs97w" event={"ID":"8cd40416-92d2-41ec-b6ae-ba668ccc5685","Type":"ContainerStarted","Data":"b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806"} Dec 11 14:17:47 crc kubenswrapper[4690]: I1211 14:17:47.908579 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-8bfnc" podStartSLOduration=3.83604245 podStartE2EDuration="2m20.908456522s" podCreationTimestamp="2025-12-11 14:15:27 +0000 UTC" firstStartedPulling="2025-12-11 14:15:29.523629956 +0000 UTC m=+61.139031599" lastFinishedPulling="2025-12-11 14:17:46.596044028 +0000 UTC m=+198.211445671" observedRunningTime="2025-12-11 14:17:47.905564375 +0000 UTC m=+199.520966018" watchObservedRunningTime="2025-12-11 14:17:47.908456522 +0000 UTC m=+199.523858165" Dec 11 14:17:47 crc kubenswrapper[4690]: I1211 14:17:47.929885 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fs97w" podStartSLOduration=3.9204028749999997 podStartE2EDuration="2m20.929859521s" podCreationTimestamp="2025-12-11 14:15:27 +0000 UTC" firstStartedPulling="2025-12-11 14:15:29.64516051 +0000 UTC m=+61.260562153" lastFinishedPulling="2025-12-11 14:17:46.654617156 +0000 UTC m=+198.270018799" observedRunningTime="2025-12-11 14:17:47.926017549 +0000 UTC m=+199.541419202" watchObservedRunningTime="2025-12-11 14:17:47.929859521 +0000 UTC m=+199.545261194" Dec 11 14:17:57 crc kubenswrapper[4690]: I1211 14:17:57.577964 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-8bfnc" Dec 11 14:17:57 crc kubenswrapper[4690]: I1211 14:17:57.579464 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-8bfnc" Dec 11 14:17:57 crc kubenswrapper[4690]: I1211 14:17:57.779005 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fs97w" Dec 11 14:17:57 crc kubenswrapper[4690]: I1211 14:17:57.779049 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fs97w" Dec 11 14:17:59 crc kubenswrapper[4690]: I1211 14:17:59.588916 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fs97w" Dec 11 14:17:59 crc kubenswrapper[4690]: I1211 14:17:59.590295 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-8bfnc" Dec 11 14:17:59 crc kubenswrapper[4690]: I1211 14:17:59.639924 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fs97w" Dec 11 14:17:59 crc kubenswrapper[4690]: I1211 14:17:59.644092 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-8bfnc" Dec 11 14:18:03 crc 
kubenswrapper[4690]: I1211 14:18:03.979521 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djmxn" event={"ID":"9fca9021-9a9f-4d5d-9892-37f39c580323","Type":"ContainerStarted","Data":"071dcc6b5227134aa7edd407cd17ec42007d5b85e0e5a79d82c2999eb083c682"} Dec 11 14:18:04 crc kubenswrapper[4690]: I1211 14:18:04.628608 4690 patch_prober.go:28] interesting pod/machine-config-daemon-z9662 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 14:18:04 crc kubenswrapper[4690]: I1211 14:18:04.629009 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z9662" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 14:18:04 crc kubenswrapper[4690]: I1211 14:18:04.992463 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xq68s" event={"ID":"09e8430b-2226-4933-9dcf-ee3b5de076c3","Type":"ContainerStarted","Data":"c706f42716c86d5638dc57c2356aaf2dcab933dfe85672bc84d377553a86dede"} Dec 11 14:18:05 crc kubenswrapper[4690]: I1211 14:18:04.996544 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-24xfd" event={"ID":"0c61312d-523c-44a8-a451-dc96bba0f6d7","Type":"ContainerStarted","Data":"f9b8770287cb42349191b0d3035d30c84198fead1ff6129dbe148e5c34e471e5"} Dec 11 14:18:05 crc kubenswrapper[4690]: I1211 14:18:04.999836 4690 generic.go:334] "Generic (PLEG): container finished" podID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" containerID="62a8b1947c7d948f2fecbea6f9f5706f0fb7a775357e818dde42e687b87369d1" exitCode=0 Dec 11 14:18:05 crc kubenswrapper[4690]: I1211 14:18:04.999889 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r7754" event={"ID":"ffb685f2-c2fc-4602-8d81-5f11b6581f29","Type":"ContainerDied","Data":"62a8b1947c7d948f2fecbea6f9f5706f0fb7a775357e818dde42e687b87369d1"} Dec 11 14:18:05 crc kubenswrapper[4690]: I1211 14:18:05.003004 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k2lrt" event={"ID":"213bdee3-5fb6-4221-819e-43ec7a01f555","Type":"ContainerStarted","Data":"305761aace6f2c4ae9bb6a34588514d1e6fe31eb6c35a1cb270e9f11c09825b8"} Dec 11 14:18:05 crc kubenswrapper[4690]: I1211 14:18:05.005603 4690 generic.go:334] "Generic (PLEG): container finished" podID="907b18d4-d2b5-47c9-9c70-716bd64330ae" containerID="50bdcf3c73cbad74f028bc28f621ecce617ac58904d367b01795ab8beb1861a3" exitCode=0 Dec 11 14:18:05 crc kubenswrapper[4690]: I1211 14:18:05.005754 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kmjlt" event={"ID":"907b18d4-d2b5-47c9-9c70-716bd64330ae","Type":"ContainerDied","Data":"50bdcf3c73cbad74f028bc28f621ecce617ac58904d367b01795ab8beb1861a3"} Dec 11 14:18:05 crc kubenswrapper[4690]: I1211 14:18:05.027937 4690 generic.go:334] "Generic (PLEG): container finished" podID="9fca9021-9a9f-4d5d-9892-37f39c580323" containerID="071dcc6b5227134aa7edd407cd17ec42007d5b85e0e5a79d82c2999eb083c682" exitCode=0 Dec 11 14:18:05 crc kubenswrapper[4690]: I1211 14:18:05.028007 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-djmxn" event={"ID":"9fca9021-9a9f-4d5d-9892-37f39c580323","Type":"ContainerDied","Data":"071dcc6b5227134aa7edd407cd17ec42007d5b85e0e5a79d82c2999eb083c682"} Dec 11 14:18:06 crc kubenswrapper[4690]: I1211 14:18:06.035390 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djmxn" event={"ID":"9fca9021-9a9f-4d5d-9892-37f39c580323","Type":"ContainerStarted","Data":"93f0ae52a70f9754902b1f3328ece5d657de29abec61d07297a34f8806e1f065"} Dec 11 14:18:06 crc kubenswrapper[4690]: I1211 14:18:06.036922 4690 generic.go:334] "Generic (PLEG): container finished" podID="09e8430b-2226-4933-9dcf-ee3b5de076c3" containerID="c706f42716c86d5638dc57c2356aaf2dcab933dfe85672bc84d377553a86dede" exitCode=0 Dec 11 14:18:06 crc kubenswrapper[4690]: I1211 14:18:06.037000 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xq68s" event={"ID":"09e8430b-2226-4933-9dcf-ee3b5de076c3","Type":"ContainerDied","Data":"c706f42716c86d5638dc57c2356aaf2dcab933dfe85672bc84d377553a86dede"} Dec 11 14:18:06 crc kubenswrapper[4690]: I1211 14:18:06.040166 4690 generic.go:334] "Generic (PLEG): container finished" podID="0c61312d-523c-44a8-a451-dc96bba0f6d7" containerID="f9b8770287cb42349191b0d3035d30c84198fead1ff6129dbe148e5c34e471e5" exitCode=0 Dec 11 14:18:06 crc kubenswrapper[4690]: I1211 14:18:06.040246 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-24xfd" event={"ID":"0c61312d-523c-44a8-a451-dc96bba0f6d7","Type":"ContainerDied","Data":"f9b8770287cb42349191b0d3035d30c84198fead1ff6129dbe148e5c34e471e5"} Dec 11 14:18:06 crc kubenswrapper[4690]: I1211 14:18:06.042641 4690 generic.go:334] "Generic (PLEG): container finished" podID="213bdee3-5fb6-4221-819e-43ec7a01f555" containerID="305761aace6f2c4ae9bb6a34588514d1e6fe31eb6c35a1cb270e9f11c09825b8" exitCode=0 Dec 11 14:18:06 crc kubenswrapper[4690]: I1211 14:18:06.042676 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k2lrt" event={"ID":"213bdee3-5fb6-4221-819e-43ec7a01f555","Type":"ContainerDied","Data":"305761aace6f2c4ae9bb6a34588514d1e6fe31eb6c35a1cb270e9f11c09825b8"} Dec 11 14:18:06 crc kubenswrapper[4690]: I1211 14:18:06.057719 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-djmxn" podStartSLOduration=2.885139713 podStartE2EDuration="2m39.057700026s" podCreationTimestamp="2025-12-11 14:15:27 +0000 UTC" firstStartedPulling="2025-12-11 14:15:29.56216109 +0000 UTC m=+61.177562733" lastFinishedPulling="2025-12-11 14:18:05.734721403 +0000 UTC m=+217.350123046" observedRunningTime="2025-12-11 14:18:06.053166325 +0000 UTC m=+217.668567968" watchObservedRunningTime="2025-12-11 14:18:06.057700026 +0000 UTC m=+217.673101669" Dec 11 14:18:07 crc kubenswrapper[4690]: I1211 14:18:07.051694 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r7754" event={"ID":"ffb685f2-c2fc-4602-8d81-5f11b6581f29","Type":"ContainerStarted","Data":"2ed5573f6b8998d3ae7701624250c0691f7a3c7f87a61806fbca3a7ee16328f7"} Dec 11 14:18:07 crc kubenswrapper[4690]: I1211 14:18:07.054341 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kmjlt" event={"ID":"907b18d4-d2b5-47c9-9c70-716bd64330ae","Type":"ContainerStarted","Data":"ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1"} Dec 11 14:18:07 crc 
kubenswrapper[4690]: I1211 14:18:07.072795 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-r7754" podStartSLOduration=4.98954191 podStartE2EDuration="2m38.072775301s" podCreationTimestamp="2025-12-11 14:15:29 +0000 UTC" firstStartedPulling="2025-12-11 14:15:33.029901669 +0000 UTC m=+64.645303302" lastFinishedPulling="2025-12-11 14:18:06.11313505 +0000 UTC m=+217.728536693" observedRunningTime="2025-12-11 14:18:07.069196895 +0000 UTC m=+218.684598538" watchObservedRunningTime="2025-12-11 14:18:07.072775301 +0000 UTC m=+218.688176944" Dec 11 14:18:07 crc kubenswrapper[4690]: I1211 14:18:07.088083 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kmjlt" podStartSLOduration=3.828531324 podStartE2EDuration="2m40.088065877s" podCreationTimestamp="2025-12-11 14:15:27 +0000 UTC" firstStartedPulling="2025-12-11 14:15:29.812320899 +0000 UTC m=+61.427722542" lastFinishedPulling="2025-12-11 14:18:06.071855452 +0000 UTC m=+217.687257095" observedRunningTime="2025-12-11 14:18:07.087148763 +0000 UTC m=+218.702550406" watchObservedRunningTime="2025-12-11 14:18:07.088065877 +0000 UTC m=+218.703467510" Dec 11 14:18:07 crc kubenswrapper[4690]: I1211 14:18:07.987171 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kmjlt" Dec 11 14:18:07 crc kubenswrapper[4690]: I1211 14:18:07.988088 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kmjlt" Dec 11 14:18:08 crc kubenswrapper[4690]: I1211 14:18:08.229258 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-djmxn" Dec 11 14:18:08 crc kubenswrapper[4690]: I1211 14:18:08.229327 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-djmxn" Dec 11 14:18:08 crc kubenswrapper[4690]: I1211 14:18:08.281387 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-djmxn" Dec 11 14:18:09 crc kubenswrapper[4690]: I1211 14:18:09.034126 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-kmjlt" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" containerName="registry-server" probeResult="failure" output=< Dec 11 14:18:09 crc kubenswrapper[4690]: timeout: failed to connect service ":50051" within 1s Dec 11 14:18:09 crc kubenswrapper[4690]: > Dec 11 14:18:09 crc kubenswrapper[4690]: I1211 14:18:09.072503 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-24xfd" event={"ID":"0c61312d-523c-44a8-a451-dc96bba0f6d7","Type":"ContainerStarted","Data":"614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4"} Dec 11 14:18:09 crc kubenswrapper[4690]: I1211 14:18:09.075125 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k2lrt" event={"ID":"213bdee3-5fb6-4221-819e-43ec7a01f555","Type":"ContainerStarted","Data":"b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b"} Dec 11 14:18:09 crc kubenswrapper[4690]: I1211 14:18:09.077049 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xq68s" 
event={"ID":"09e8430b-2226-4933-9dcf-ee3b5de076c3","Type":"ContainerStarted","Data":"f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4"} Dec 11 14:18:09 crc kubenswrapper[4690]: I1211 14:18:09.090592 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-24xfd" podStartSLOduration=3.904488412 podStartE2EDuration="2m39.09057106s" podCreationTimestamp="2025-12-11 14:15:30 +0000 UTC" firstStartedPulling="2025-12-11 14:15:33.03115885 +0000 UTC m=+64.646560493" lastFinishedPulling="2025-12-11 14:18:08.217241498 +0000 UTC m=+219.832643141" observedRunningTime="2025-12-11 14:18:09.088541576 +0000 UTC m=+220.703943229" watchObservedRunningTime="2025-12-11 14:18:09.09057106 +0000 UTC m=+220.705972703" Dec 11 14:18:09 crc kubenswrapper[4690]: I1211 14:18:09.110644 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xq68s" podStartSLOduration=2.9701415300000003 podStartE2EDuration="2m38.110627564s" podCreationTimestamp="2025-12-11 14:15:31 +0000 UTC" firstStartedPulling="2025-12-11 14:15:33.031329435 +0000 UTC m=+64.646731078" lastFinishedPulling="2025-12-11 14:18:08.171815469 +0000 UTC m=+219.787217112" observedRunningTime="2025-12-11 14:18:09.106260708 +0000 UTC m=+220.721662351" watchObservedRunningTime="2025-12-11 14:18:09.110627564 +0000 UTC m=+220.726029207" Dec 11 14:18:09 crc kubenswrapper[4690]: I1211 14:18:09.131019 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-k2lrt" podStartSLOduration=3.910787407 podStartE2EDuration="2m40.131001786s" podCreationTimestamp="2025-12-11 14:15:29 +0000 UTC" firstStartedPulling="2025-12-11 14:15:31.920364169 +0000 UTC m=+63.535765812" lastFinishedPulling="2025-12-11 14:18:08.140578548 +0000 UTC m=+219.755980191" observedRunningTime="2025-12-11 14:18:09.123434525 +0000 UTC m=+220.738836168" watchObservedRunningTime="2025-12-11 14:18:09.131001786 +0000 UTC m=+220.746403429" Dec 11 14:18:09 crc kubenswrapper[4690]: I1211 14:18:09.520667 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-k2lrt" Dec 11 14:18:09 crc kubenswrapper[4690]: I1211 14:18:09.521020 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-k2lrt" Dec 11 14:18:10 crc kubenswrapper[4690]: I1211 14:18:10.103467 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-r7754" Dec 11 14:18:10 crc kubenswrapper[4690]: I1211 14:18:10.103707 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-r7754" Dec 11 14:18:10 crc kubenswrapper[4690]: I1211 14:18:10.155483 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-r7754" Dec 11 14:18:10 crc kubenswrapper[4690]: I1211 14:18:10.579816 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-k2lrt" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" containerName="registry-server" probeResult="failure" output=< Dec 11 14:18:10 crc kubenswrapper[4690]: timeout: failed to connect service ":50051" within 1s Dec 11 14:18:10 crc kubenswrapper[4690]: > Dec 11 14:18:11 crc kubenswrapper[4690]: I1211 14:18:11.127141 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-24xfd" Dec 11 14:18:11 crc kubenswrapper[4690]: I1211 14:18:11.127687 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-24xfd" Dec 11 14:18:11 crc kubenswrapper[4690]: I1211 14:18:11.130972 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-r7754" Dec 11 14:18:11 crc kubenswrapper[4690]: I1211 14:18:11.503122 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xq68s" Dec 11 14:18:11 crc kubenswrapper[4690]: I1211 14:18:11.503161 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xq68s" Dec 11 14:18:12 crc kubenswrapper[4690]: I1211 14:18:12.062984 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-r7754"] Dec 11 14:18:12 crc kubenswrapper[4690]: I1211 14:18:12.168745 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-24xfd" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" containerName="registry-server" probeResult="failure" output=< Dec 11 14:18:12 crc kubenswrapper[4690]: timeout: failed to connect service ":50051" within 1s Dec 11 14:18:12 crc kubenswrapper[4690]: > Dec 11 14:18:12 crc kubenswrapper[4690]: I1211 14:18:12.545455 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xq68s" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" containerName="registry-server" probeResult="failure" output=< Dec 11 14:18:12 crc kubenswrapper[4690]: timeout: failed to connect service ":50051" within 1s Dec 11 14:18:12 crc kubenswrapper[4690]: > Dec 11 14:18:13 crc kubenswrapper[4690]: I1211 14:18:13.096933 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-r7754" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" containerName="registry-server" containerID="cri-o://2ed5573f6b8998d3ae7701624250c0691f7a3c7f87a61806fbca3a7ee16328f7" gracePeriod=2 Dec 11 14:18:18 crc kubenswrapper[4690]: I1211 14:18:18.024557 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kmjlt" Dec 11 14:18:18 crc kubenswrapper[4690]: I1211 14:18:18.064049 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kmjlt" Dec 11 14:18:18 crc kubenswrapper[4690]: I1211 14:18:18.258590 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kmjlt"] Dec 11 14:18:18 crc kubenswrapper[4690]: I1211 14:18:18.270982 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-djmxn" Dec 11 14:18:18 crc kubenswrapper[4690]: I1211 14:18:18.486233 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9wx9g"] Dec 11 14:18:19 crc kubenswrapper[4690]: I1211 14:18:19.566065 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-k2lrt" Dec 11 14:18:19 crc kubenswrapper[4690]: I1211 14:18:19.602727 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-k2lrt" Dec 11 14:18:20 crc kubenswrapper[4690]: E1211 14:18:20.104276 4690 
log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ed5573f6b8998d3ae7701624250c0691f7a3c7f87a61806fbca3a7ee16328f7 is running failed: container process not found" containerID="2ed5573f6b8998d3ae7701624250c0691f7a3c7f87a61806fbca3a7ee16328f7" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:20 crc kubenswrapper[4690]: E1211 14:18:20.104809 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ed5573f6b8998d3ae7701624250c0691f7a3c7f87a61806fbca3a7ee16328f7 is running failed: container process not found" containerID="2ed5573f6b8998d3ae7701624250c0691f7a3c7f87a61806fbca3a7ee16328f7" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:20 crc kubenswrapper[4690]: E1211 14:18:20.105365 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ed5573f6b8998d3ae7701624250c0691f7a3c7f87a61806fbca3a7ee16328f7 is running failed: container process not found" containerID="2ed5573f6b8998d3ae7701624250c0691f7a3c7f87a61806fbca3a7ee16328f7" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:20 crc kubenswrapper[4690]: E1211 14:18:20.105407 4690 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 2ed5573f6b8998d3ae7701624250c0691f7a3c7f87a61806fbca3a7ee16328f7 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-r7754" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" containerName="registry-server" Dec 11 14:18:20 crc kubenswrapper[4690]: I1211 14:18:20.657670 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-djmxn"] Dec 11 14:18:21 crc kubenswrapper[4690]: I1211 14:18:21.169085 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-24xfd" Dec 11 14:18:21 crc kubenswrapper[4690]: I1211 14:18:21.207057 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-24xfd" Dec 11 14:18:21 crc kubenswrapper[4690]: I1211 14:18:21.543088 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xq68s" Dec 11 14:18:21 crc kubenswrapper[4690]: I1211 14:18:21.582636 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xq68s" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.716747 4690 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 11 14:18:22 crc kubenswrapper[4690]: E1211 14:18:22.717512 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7731421f-ecdd-4ff8-aa63-79ae9983425f" containerName="kube-multus-additional-cni-plugins" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.717531 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="7731421f-ecdd-4ff8-aa63-79ae9983425f" containerName="kube-multus-additional-cni-plugins" Dec 11 14:18:22 crc kubenswrapper[4690]: E1211 14:18:22.717546 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e36d4d2-cf14-40b1-897b-2704c32e12c4" containerName="pruner" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.717554 4690 
state_mem.go:107] "Deleted CPUSet assignment" podUID="0e36d4d2-cf14-40b1-897b-2704c32e12c4" containerName="pruner" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.717676 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="7731421f-ecdd-4ff8-aa63-79ae9983425f" containerName="kube-multus-additional-cni-plugins" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.717695 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e36d4d2-cf14-40b1-897b-2704c32e12c4" containerName="pruner" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.718022 4690 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.718252 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://9db92bcfe7099e9a716af1be228bcdb626b2b2383c72901ebbd9ea1c1aceb5a0" gracePeriod=15 Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.718420 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.718791 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://9a8a4902e0c6ee75ff794209a28d8f53bd597ce51f863529640262e123086e81" gracePeriod=15 Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.718837 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://ad35e4e1b19e2eb60eaf01f14cac102b2923d73b6554b5313f9b03341bdbe2b0" gracePeriod=15 Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.718868 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://d35a4b3df00b428cbc8e2c97e31b53f1c712b14e5675c5dc1de3008e35d13f74" gracePeriod=15 Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.718901 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://48702dab61bba8195ae012efbdf07816dddea37d296c326397f4fcbdfe766e3e" gracePeriod=15 Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.719072 4690 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 14:18:22 crc kubenswrapper[4690]: E1211 14:18:22.719336 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.719351 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 11 14:18:22 crc kubenswrapper[4690]: E1211 14:18:22.719359 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-cert-regeneration-controller" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.719365 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 11 14:18:22 crc kubenswrapper[4690]: E1211 14:18:22.719377 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.719383 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 11 14:18:22 crc kubenswrapper[4690]: E1211 14:18:22.719395 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.719403 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 14:18:22 crc kubenswrapper[4690]: E1211 14:18:22.719413 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.719421 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 11 14:18:22 crc kubenswrapper[4690]: E1211 14:18:22.719432 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.719437 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 11 14:18:22 crc kubenswrapper[4690]: E1211 14:18:22.719444 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.719451 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.719565 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.719577 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.719586 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.720299 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.720309 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.720318 4690 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.758015 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.818358 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.818918 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.818999 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.819032 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.819056 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.819083 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.819179 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.819203 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: 
I1211 14:18:22.920084 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.920138 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.920197 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.920221 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.920254 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.920272 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.920289 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.920309 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.920391 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.920430 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.920451 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.920475 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.920494 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.920516 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.920541 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:18:22 crc kubenswrapper[4690]: I1211 14:18:22.920559 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.059393 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:18:23 crc kubenswrapper[4690]: W1211 14:18:23.084166 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-14ff369b3d7d1a0532af8978fbb830bf1961d57eacd2da3ff1c900f23dfbf1e4 WatchSource:0}: Error finding container 14ff369b3d7d1a0532af8978fbb830bf1961d57eacd2da3ff1c900f23dfbf1e4: Status 404 returned error can't find the container with id 14ff369b3d7d1a0532af8978fbb830bf1961d57eacd2da3ff1c900f23dfbf1e4 Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.183041 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fs97w"] Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.185410 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fs97w" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" containerName="registry-server" containerID="cri-o://b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806" gracePeriod=30 Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.189483 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-8bfnc"] Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.189698 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-8bfnc" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" containerName="registry-server" containerID="cri-o://cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503" gracePeriod=30 Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.192641 4690 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.206822 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-r2jbn"] Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.207118 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" containerName="marketplace-operator" containerID="cri-o://c7efa6125e6661f92724567c72836d33c207ea4ed35bcb7be157fa116ad5ccbb" gracePeriod=30 Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.211121 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k2lrt"] Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.218895 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7bxfp"] Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.219546 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.232608 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-24xfd"] Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.243267 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xq68s"] Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.257921 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7bxfp"] Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.335673 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2h79f\" (UniqueName: \"kubernetes.io/projected/203500e9-a305-42cd-9909-d64ca944d363-kube-api-access-2h79f\") pod \"marketplace-operator-79b997595-7bxfp\" (UID: \"203500e9-a305-42cd-9909-d64ca944d363\") " pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.335731 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/203500e9-a305-42cd-9909-d64ca944d363-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7bxfp\" (UID: \"203500e9-a305-42cd-9909-d64ca944d363\") " pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.335773 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/203500e9-a305-42cd-9909-d64ca944d363-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7bxfp\" (UID: \"203500e9-a305-42cd-9909-d64ca944d363\") " pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.436767 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/203500e9-a305-42cd-9909-d64ca944d363-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7bxfp\" (UID: \"203500e9-a305-42cd-9909-d64ca944d363\") " pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.436848 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2h79f\" (UniqueName: \"kubernetes.io/projected/203500e9-a305-42cd-9909-d64ca944d363-kube-api-access-2h79f\") pod \"marketplace-operator-79b997595-7bxfp\" (UID: \"203500e9-a305-42cd-9909-d64ca944d363\") " pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.436875 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/203500e9-a305-42cd-9909-d64ca944d363-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7bxfp\" (UID: \"203500e9-a305-42cd-9909-d64ca944d363\") " pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.438431 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/203500e9-a305-42cd-9909-d64ca944d363-marketplace-trusted-ca\") pod 
\"marketplace-operator-79b997595-7bxfp\" (UID: \"203500e9-a305-42cd-9909-d64ca944d363\") " pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.442410 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/203500e9-a305-42cd-9909-d64ca944d363-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7bxfp\" (UID: \"203500e9-a305-42cd-9909-d64ca944d363\") " pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.453752 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2h79f\" (UniqueName: \"kubernetes.io/projected/203500e9-a305-42cd-9909-d64ca944d363-kube-api-access-2h79f\") pod \"marketplace-operator-79b997595-7bxfp\" (UID: \"203500e9-a305-42cd-9909-d64ca944d363\") " pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:23 crc kubenswrapper[4690]: I1211 14:18:23.544759 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:24 crc kubenswrapper[4690]: E1211 14:18:24.058824 4690 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:24 crc kubenswrapper[4690]: E1211 14:18:24.059623 4690 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:24 crc kubenswrapper[4690]: E1211 14:18:24.060081 4690 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:24 crc kubenswrapper[4690]: E1211 14:18:24.060269 4690 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:24 crc kubenswrapper[4690]: E1211 14:18:24.060418 4690 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:24 crc kubenswrapper[4690]: I1211 14:18:24.060438 4690 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 11 14:18:24 crc kubenswrapper[4690]: E1211 14:18:24.060580 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="200ms" Dec 11 14:18:24 crc kubenswrapper[4690]: E1211 14:18:24.064801 4690 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 11 14:18:24 crc kubenswrapper[4690]: rpc error: code = Unknown desc = failed to create pod network sandbox 
k8s_marketplace-operator-79b997595-7bxfp_openshift-marketplace_203500e9-a305-42cd-9909-d64ca944d363_0(929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13): error adding pod openshift-marketplace_marketplace-operator-79b997595-7bxfp to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13" Netns:"/var/run/netns/e27a2426-a164-4a42-9b0f-8d41a5f46787" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-7bxfp;K8S_POD_INFRA_CONTAINER_ID=929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13;K8S_POD_UID=203500e9-a305-42cd-9909-d64ca944d363" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-7bxfp] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-7bxfp/203500e9-a305-42cd-9909-d64ca944d363]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-7bxfp?timeout=1m0s": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:18:24 crc kubenswrapper[4690]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 11 14:18:24 crc kubenswrapper[4690]: > Dec 11 14:18:24 crc kubenswrapper[4690]: E1211 14:18:24.064848 4690 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 11 14:18:24 crc kubenswrapper[4690]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-7bxfp_openshift-marketplace_203500e9-a305-42cd-9909-d64ca944d363_0(929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13): error adding pod openshift-marketplace_marketplace-operator-79b997595-7bxfp to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13" Netns:"/var/run/netns/e27a2426-a164-4a42-9b0f-8d41a5f46787" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-7bxfp;K8S_POD_INFRA_CONTAINER_ID=929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13;K8S_POD_UID=203500e9-a305-42cd-9909-d64ca944d363" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-7bxfp] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-7bxfp/203500e9-a305-42cd-9909-d64ca944d363]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: status update failed for pod /: Get 
"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-7bxfp?timeout=1m0s": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:18:24 crc kubenswrapper[4690]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 11 14:18:24 crc kubenswrapper[4690]: > pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:24 crc kubenswrapper[4690]: E1211 14:18:24.064869 4690 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Dec 11 14:18:24 crc kubenswrapper[4690]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-7bxfp_openshift-marketplace_203500e9-a305-42cd-9909-d64ca944d363_0(929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13): error adding pod openshift-marketplace_marketplace-operator-79b997595-7bxfp to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13" Netns:"/var/run/netns/e27a2426-a164-4a42-9b0f-8d41a5f46787" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-7bxfp;K8S_POD_INFRA_CONTAINER_ID=929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13;K8S_POD_UID=203500e9-a305-42cd-9909-d64ca944d363" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-7bxfp] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-7bxfp/203500e9-a305-42cd-9909-d64ca944d363]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-7bxfp?timeout=1m0s": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:18:24 crc kubenswrapper[4690]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 11 14:18:24 crc kubenswrapper[4690]: > pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:24 crc kubenswrapper[4690]: E1211 14:18:24.064934 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"marketplace-operator-79b997595-7bxfp_openshift-marketplace(203500e9-a305-42cd-9909-d64ca944d363)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"marketplace-operator-79b997595-7bxfp_openshift-marketplace(203500e9-a305-42cd-9909-d64ca944d363)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox 
k8s_marketplace-operator-79b997595-7bxfp_openshift-marketplace_203500e9-a305-42cd-9909-d64ca944d363_0(929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13): error adding pod openshift-marketplace_marketplace-operator-79b997595-7bxfp to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13\\\" Netns:\\\"/var/run/netns/e27a2426-a164-4a42-9b0f-8d41a5f46787\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-7bxfp;K8S_POD_INFRA_CONTAINER_ID=929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13;K8S_POD_UID=203500e9-a305-42cd-9909-d64ca944d363\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-7bxfp] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-7bxfp/203500e9-a305-42cd-9909-d64ca944d363]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: status update failed for pod /: Get \\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-7bxfp?timeout=1m0s\\\": dial tcp 38.102.83.51:6443: connect: connection refused\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" podUID="203500e9-a305-42cd-9909-d64ca944d363" Dec 11 14:18:24 crc kubenswrapper[4690]: E1211 14:18:24.065259 4690 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.51:6443: connect: connection refused" event=< Dec 11 14:18:24 crc kubenswrapper[4690]: &Event{ObjectMeta:{marketplace-operator-79b997595-7bxfp.18802ef9bbfd3991 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:marketplace-operator-79b997595-7bxfp,UID:203500e9-a305-42cd-9909-d64ca944d363,APIVersion:v1,ResourceVersion:29396,FieldPath:,},Reason:FailedCreatePodSandBox,Message:Failed to create pod sandbox: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-7bxfp_openshift-marketplace_203500e9-a305-42cd-9909-d64ca944d363_0(929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13): error adding pod openshift-marketplace_marketplace-operator-79b997595-7bxfp to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13" Netns:"/var/run/netns/e27a2426-a164-4a42-9b0f-8d41a5f46787" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-7bxfp;K8S_POD_INFRA_CONTAINER_ID=929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13;K8S_POD_UID=203500e9-a305-42cd-9909-d64ca944d363" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-7bxfp] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-7bxfp/203500e9-a305-42cd-9909-d64ca944d363]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-7bxfp?timeout=1m0s": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:18:24 crc kubenswrapper[4690]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"},Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 14:18:24.064903569 +0000 UTC m=+235.680305212,LastTimestamp:2025-12-11 14:18:24.064903569 +0000 UTC m=+235.680305212,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Dec 11 14:18:24 crc kubenswrapper[4690]: > Dec 11 14:18:24 crc kubenswrapper[4690]: I1211 14:18:24.171838 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r7754_ffb685f2-c2fc-4602-8d81-5f11b6581f29/registry-server/0.log" Dec 11 14:18:24 crc kubenswrapper[4690]: I1211 14:18:24.173240 4690 generic.go:334] "Generic (PLEG): container finished" podID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" containerID="2ed5573f6b8998d3ae7701624250c0691f7a3c7f87a61806fbca3a7ee16328f7" exitCode=-1 Dec 11 14:18:24 crc kubenswrapper[4690]: I1211 14:18:24.173308 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r7754" event={"ID":"ffb685f2-c2fc-4602-8d81-5f11b6581f29","Type":"ContainerDied","Data":"2ed5573f6b8998d3ae7701624250c0691f7a3c7f87a61806fbca3a7ee16328f7"} Dec 11 14:18:24 crc kubenswrapper[4690]: I1211 14:18:24.173524 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-djmxn" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" containerName="registry-server" containerID="cri-o://93f0ae52a70f9754902b1f3328ece5d657de29abec61d07297a34f8806e1f065" gracePeriod=2 Dec 11 14:18:24 crc kubenswrapper[4690]: I1211 14:18:24.173843 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kmjlt" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" containerName="registry-server" containerID="cri-o://ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1" gracePeriod=2 Dec 11 14:18:24 crc kubenswrapper[4690]: I1211 14:18:24.174476 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:24 crc kubenswrapper[4690]: I1211 14:18:24.174821 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:24 crc kubenswrapper[4690]: I1211 14:18:24.175510 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:24 crc kubenswrapper[4690]: I1211 14:18:24.175932 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:24 crc kubenswrapper[4690]: E1211 14:18:24.262311 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="400ms" Dec 11 14:18:24 crc kubenswrapper[4690]: E1211 14:18:24.663114 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="800ms" Dec 11 14:18:25 crc kubenswrapper[4690]: I1211 14:18:25.180979 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 11 14:18:25 crc kubenswrapper[4690]: I1211 14:18:25.183512 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 14:18:25 crc kubenswrapper[4690]: I1211 14:18:25.184356 4690 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="48702dab61bba8195ae012efbdf07816dddea37d296c326397f4fcbdfe766e3e" exitCode=2 Dec 11 14:18:25 crc kubenswrapper[4690]: I1211 14:18:25.187305 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:25 crc kubenswrapper[4690]: I1211 14:18:25.187371 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"14ff369b3d7d1a0532af8978fbb830bf1961d57eacd2da3ff1c900f23dfbf1e4"} Dec 11 14:18:25 crc kubenswrapper[4690]: I1211 14:18:25.188059 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:25 crc kubenswrapper[4690]: E1211 14:18:25.464328 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="1.6s" Dec 11 14:18:25 crc kubenswrapper[4690]: E1211 14:18:25.511684 4690 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 11 14:18:25 crc kubenswrapper[4690]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-7bxfp_openshift-marketplace_203500e9-a305-42cd-9909-d64ca944d363_0(2df968d7d3e5f43dbb7848d976ca9e69050a7e551de3219ba9c356c8e704cb7a): error adding pod openshift-marketplace_marketplace-operator-79b997595-7bxfp to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"2df968d7d3e5f43dbb7848d976ca9e69050a7e551de3219ba9c356c8e704cb7a" Netns:"/var/run/netns/a2f11337-4cfe-4fbc-bb12-5428d1ff7212" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-7bxfp;K8S_POD_INFRA_CONTAINER_ID=2df968d7d3e5f43dbb7848d976ca9e69050a7e551de3219ba9c356c8e704cb7a;K8S_POD_UID=203500e9-a305-42cd-9909-d64ca944d363" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-7bxfp] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-7bxfp/203500e9-a305-42cd-9909-d64ca944d363]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-7bxfp?timeout=1m0s": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:18:25 crc kubenswrapper[4690]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 11 14:18:25 crc kubenswrapper[4690]: > Dec 11 14:18:25 crc kubenswrapper[4690]: E1211 14:18:25.511767 4690 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 11 14:18:25 crc kubenswrapper[4690]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-7bxfp_openshift-marketplace_203500e9-a305-42cd-9909-d64ca944d363_0(2df968d7d3e5f43dbb7848d976ca9e69050a7e551de3219ba9c356c8e704cb7a): error adding pod openshift-marketplace_marketplace-operator-79b997595-7bxfp to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"2df968d7d3e5f43dbb7848d976ca9e69050a7e551de3219ba9c356c8e704cb7a" Netns:"/var/run/netns/a2f11337-4cfe-4fbc-bb12-5428d1ff7212" IfName:"eth0" 
Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-7bxfp;K8S_POD_INFRA_CONTAINER_ID=2df968d7d3e5f43dbb7848d976ca9e69050a7e551de3219ba9c356c8e704cb7a;K8S_POD_UID=203500e9-a305-42cd-9909-d64ca944d363" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-7bxfp] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-7bxfp/203500e9-a305-42cd-9909-d64ca944d363]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-7bxfp?timeout=1m0s": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:18:25 crc kubenswrapper[4690]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 11 14:18:25 crc kubenswrapper[4690]: > pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:25 crc kubenswrapper[4690]: E1211 14:18:25.511789 4690 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Dec 11 14:18:25 crc kubenswrapper[4690]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-7bxfp_openshift-marketplace_203500e9-a305-42cd-9909-d64ca944d363_0(2df968d7d3e5f43dbb7848d976ca9e69050a7e551de3219ba9c356c8e704cb7a): error adding pod openshift-marketplace_marketplace-operator-79b997595-7bxfp to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"2df968d7d3e5f43dbb7848d976ca9e69050a7e551de3219ba9c356c8e704cb7a" Netns:"/var/run/netns/a2f11337-4cfe-4fbc-bb12-5428d1ff7212" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-7bxfp;K8S_POD_INFRA_CONTAINER_ID=2df968d7d3e5f43dbb7848d976ca9e69050a7e551de3219ba9c356c8e704cb7a;K8S_POD_UID=203500e9-a305-42cd-9909-d64ca944d363" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-7bxfp] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-7bxfp/203500e9-a305-42cd-9909-d64ca944d363]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-7bxfp?timeout=1m0s": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:18:25 crc kubenswrapper[4690]: ': StdinData: 
{"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 11 14:18:25 crc kubenswrapper[4690]: > pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:25 crc kubenswrapper[4690]: E1211 14:18:25.511865 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"marketplace-operator-79b997595-7bxfp_openshift-marketplace(203500e9-a305-42cd-9909-d64ca944d363)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"marketplace-operator-79b997595-7bxfp_openshift-marketplace(203500e9-a305-42cd-9909-d64ca944d363)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-7bxfp_openshift-marketplace_203500e9-a305-42cd-9909-d64ca944d363_0(2df968d7d3e5f43dbb7848d976ca9e69050a7e551de3219ba9c356c8e704cb7a): error adding pod openshift-marketplace_marketplace-operator-79b997595-7bxfp to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"2df968d7d3e5f43dbb7848d976ca9e69050a7e551de3219ba9c356c8e704cb7a\\\" Netns:\\\"/var/run/netns/a2f11337-4cfe-4fbc-bb12-5428d1ff7212\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-7bxfp;K8S_POD_INFRA_CONTAINER_ID=2df968d7d3e5f43dbb7848d976ca9e69050a7e551de3219ba9c356c8e704cb7a;K8S_POD_UID=203500e9-a305-42cd-9909-d64ca944d363\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-7bxfp] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-7bxfp/203500e9-a305-42cd-9909-d64ca944d363]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: status update failed for pod /: Get \\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-7bxfp?timeout=1m0s\\\": dial tcp 38.102.83.51:6443: connect: connection refused\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" podUID="203500e9-a305-42cd-9909-d64ca944d363" Dec 11 14:18:26 crc kubenswrapper[4690]: I1211 14:18:26.192130 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-k2lrt" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" containerName="registry-server" containerID="cri-o://b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b" gracePeriod=30 Dec 11 14:18:26 crc kubenswrapper[4690]: 
I1211 14:18:26.192225 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xq68s" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" containerName="registry-server" containerID="cri-o://f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4" gracePeriod=30 Dec 11 14:18:26 crc kubenswrapper[4690]: I1211 14:18:26.192474 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-24xfd" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" containerName="registry-server" containerID="cri-o://614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4" gracePeriod=30 Dec 11 14:18:26 crc kubenswrapper[4690]: I1211 14:18:26.193753 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:26 crc kubenswrapper[4690]: I1211 14:18:26.194079 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:26 crc kubenswrapper[4690]: I1211 14:18:26.194339 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:26 crc kubenswrapper[4690]: I1211 14:18:26.194647 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:26 crc kubenswrapper[4690]: I1211 14:18:26.195217 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:26 crc kubenswrapper[4690]: I1211 14:18:26.195471 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:26 crc kubenswrapper[4690]: I1211 14:18:26.195711 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:26 crc kubenswrapper[4690]: I1211 14:18:26.196161 4690 status_manager.go:851] 
"Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:26 crc kubenswrapper[4690]: I1211 14:18:26.196410 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:27 crc kubenswrapper[4690]: E1211 14:18:27.067679 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="3.2s" Dec 11 14:18:27 crc kubenswrapper[4690]: E1211 14:18:27.578697 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503 is running failed: container process not found" containerID="cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:27 crc kubenswrapper[4690]: E1211 14:18:27.579154 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503 is running failed: container process not found" containerID="cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:27 crc kubenswrapper[4690]: E1211 14:18:27.579429 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503 is running failed: container process not found" containerID="cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:27 crc kubenswrapper[4690]: E1211 14:18:27.579467 4690 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-8bfnc" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" containerName="registry-server" Dec 11 14:18:27 crc kubenswrapper[4690]: E1211 14:18:27.720444 4690 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.51:6443: connect: connection refused" event=< Dec 11 14:18:27 crc kubenswrapper[4690]: &Event{ObjectMeta:{marketplace-operator-79b997595-7bxfp.18802ef9bbfd3991 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:marketplace-operator-79b997595-7bxfp,UID:203500e9-a305-42cd-9909-d64ca944d363,APIVersion:v1,ResourceVersion:29396,FieldPath:,},Reason:FailedCreatePodSandBox,Message:Failed to create pod sandbox: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-7bxfp_openshift-marketplace_203500e9-a305-42cd-9909-d64ca944d363_0(929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13): error adding pod openshift-marketplace_marketplace-operator-79b997595-7bxfp to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13" Netns:"/var/run/netns/e27a2426-a164-4a42-9b0f-8d41a5f46787" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-7bxfp;K8S_POD_INFRA_CONTAINER_ID=929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13;K8S_POD_UID=203500e9-a305-42cd-9909-d64ca944d363" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-7bxfp] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-7bxfp/203500e9-a305-42cd-9909-d64ca944d363]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-7bxfp?timeout=1m0s": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:18:27 crc kubenswrapper[4690]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"},Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 14:18:24.064903569 +0000 UTC m=+235.680305212,LastTimestamp:2025-12-11 14:18:24.064903569 +0000 UTC m=+235.680305212,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Dec 11 14:18:27 crc kubenswrapper[4690]: > Dec 11 14:18:27 crc kubenswrapper[4690]: E1211 14:18:27.780574 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806 is running failed: container process not found" containerID="b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:27 crc kubenswrapper[4690]: E1211 14:18:27.781103 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806 is running failed: container process not found" containerID="b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806" cmd=["grpc_health_probe","-addr=:50051"] Dec 
11 14:18:27 crc kubenswrapper[4690]: E1211 14:18:27.781547 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806 is running failed: container process not found" containerID="b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:27 crc kubenswrapper[4690]: E1211 14:18:27.781598 4690 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-fs97w" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" containerName="registry-server" Dec 11 14:18:27 crc kubenswrapper[4690]: E1211 14:18:27.987571 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1 is running failed: container process not found" containerID="ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:27 crc kubenswrapper[4690]: E1211 14:18:27.987947 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1 is running failed: container process not found" containerID="ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:27 crc kubenswrapper[4690]: E1211 14:18:27.988343 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1 is running failed: container process not found" containerID="ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:27 crc kubenswrapper[4690]: E1211 14:18:27.988373 4690 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-kmjlt" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" containerName="registry-server" Dec 11 14:18:28 crc kubenswrapper[4690]: E1211 14:18:28.225827 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 93f0ae52a70f9754902b1f3328ece5d657de29abec61d07297a34f8806e1f065 is running failed: container process not found" containerID="93f0ae52a70f9754902b1f3328ece5d657de29abec61d07297a34f8806e1f065" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:28 crc kubenswrapper[4690]: E1211 14:18:28.226170 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 93f0ae52a70f9754902b1f3328ece5d657de29abec61d07297a34f8806e1f065 is running failed: container process not found" 
containerID="93f0ae52a70f9754902b1f3328ece5d657de29abec61d07297a34f8806e1f065" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:28 crc kubenswrapper[4690]: E1211 14:18:28.226499 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 93f0ae52a70f9754902b1f3328ece5d657de29abec61d07297a34f8806e1f065 is running failed: container process not found" containerID="93f0ae52a70f9754902b1f3328ece5d657de29abec61d07297a34f8806e1f065" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:28 crc kubenswrapper[4690]: E1211 14:18:28.226534 4690 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 93f0ae52a70f9754902b1f3328ece5d657de29abec61d07297a34f8806e1f065 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-djmxn" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" containerName="registry-server" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.626359 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-r7754_ffb685f2-c2fc-4602-8d81-5f11b6581f29/registry-server/0.log" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.627393 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r7754" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.627916 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.628249 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.628648 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.628907 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.629174 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.629414 4690 status_manager.go:851] "Failed to get status for 
pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.633433 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.633740 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.634069 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.634455 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.634747 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.635240 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.708465 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffb685f2-c2fc-4602-8d81-5f11b6581f29-catalog-content\") pod \"ffb685f2-c2fc-4602-8d81-5f11b6581f29\" (UID: \"ffb685f2-c2fc-4602-8d81-5f11b6581f29\") " Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.708537 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffb685f2-c2fc-4602-8d81-5f11b6581f29-utilities\") pod \"ffb685f2-c2fc-4602-8d81-5f11b6581f29\" (UID: \"ffb685f2-c2fc-4602-8d81-5f11b6581f29\") " Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.708569 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tc9w\" (UniqueName: 
\"kubernetes.io/projected/ffb685f2-c2fc-4602-8d81-5f11b6581f29-kube-api-access-8tc9w\") pod \"ffb685f2-c2fc-4602-8d81-5f11b6581f29\" (UID: \"ffb685f2-c2fc-4602-8d81-5f11b6581f29\") " Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.709520 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffb685f2-c2fc-4602-8d81-5f11b6581f29-utilities" (OuterVolumeSpecName: "utilities") pod "ffb685f2-c2fc-4602-8d81-5f11b6581f29" (UID: "ffb685f2-c2fc-4602-8d81-5f11b6581f29"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.714347 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffb685f2-c2fc-4602-8d81-5f11b6581f29-kube-api-access-8tc9w" (OuterVolumeSpecName: "kube-api-access-8tc9w") pod "ffb685f2-c2fc-4602-8d81-5f11b6581f29" (UID: "ffb685f2-c2fc-4602-8d81-5f11b6581f29"). InnerVolumeSpecName "kube-api-access-8tc9w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.729810 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffb685f2-c2fc-4602-8d81-5f11b6581f29-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ffb685f2-c2fc-4602-8d81-5f11b6581f29" (UID: "ffb685f2-c2fc-4602-8d81-5f11b6581f29"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.809798 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffb685f2-c2fc-4602-8d81-5f11b6581f29-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.809834 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tc9w\" (UniqueName: \"kubernetes.io/projected/ffb685f2-c2fc-4602-8d81-5f11b6581f29-kube-api-access-8tc9w\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:28 crc kubenswrapper[4690]: I1211 14:18:28.809846 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffb685f2-c2fc-4602-8d81-5f11b6581f29-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:29 crc kubenswrapper[4690]: E1211 14:18:29.521696 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b is running failed: container process not found" containerID="b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:29 crc kubenswrapper[4690]: E1211 14:18:29.522372 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b is running failed: container process not found" containerID="b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:29 crc kubenswrapper[4690]: E1211 14:18:29.522671 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b is running failed: container process not 
found" containerID="b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:29 crc kubenswrapper[4690]: E1211 14:18:29.522722 4690 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-k2lrt" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" containerName="registry-server" Dec 11 14:18:30 crc kubenswrapper[4690]: E1211 14:18:30.268677 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="6.4s" Dec 11 14:18:31 crc kubenswrapper[4690]: E1211 14:18:31.128512 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4 is running failed: container process not found" containerID="614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:31 crc kubenswrapper[4690]: E1211 14:18:31.129446 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4 is running failed: container process not found" containerID="614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:31 crc kubenswrapper[4690]: E1211 14:18:31.129857 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4 is running failed: container process not found" containerID="614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:31 crc kubenswrapper[4690]: E1211 14:18:31.129986 4690 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-24xfd" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" containerName="registry-server" Dec 11 14:18:31 crc kubenswrapper[4690]: E1211 14:18:31.503982 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4 is running failed: container process not found" containerID="f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:31 crc kubenswrapper[4690]: E1211 14:18:31.504508 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4 is running failed: container process not found" 
containerID="f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:31 crc kubenswrapper[4690]: E1211 14:18:31.504923 4690 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4 is running failed: container process not found" containerID="f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4" cmd=["grpc_health_probe","-addr=:50051"] Dec 11 14:18:31 crc kubenswrapper[4690]: E1211 14:18:31.504980 4690 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-xq68s" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" containerName="registry-server" Dec 11 14:18:32 crc kubenswrapper[4690]: E1211 14:18:32.521486 4690 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/system.slice/NetworkManager-dispatcher.service\": RecentStats: unable to find data in memory cache]" Dec 11 14:18:32 crc kubenswrapper[4690]: I1211 14:18:32.755972 4690 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-r2jbn container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Dec 11 14:18:32 crc kubenswrapper[4690]: I1211 14:18:32.756273 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.25:8080/healthz\": dial tcp 10.217.0.25:8080: connect: connection refused" Dec 11 14:18:33 crc kubenswrapper[4690]: I1211 14:18:33.125122 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Dec 11 14:18:33 crc kubenswrapper[4690]: I1211 14:18:33.127106 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 11 14:18:34 crc kubenswrapper[4690]: I1211 14:18:34.627355 4690 patch_prober.go:28] interesting pod/machine-config-daemon-z9662 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 14:18:34 crc kubenswrapper[4690]: I1211 14:18:34.627644 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z9662" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 14:18:34 crc kubenswrapper[4690]: I1211 14:18:34.627687 4690 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-z9662" Dec 11 14:18:34 crc kubenswrapper[4690]: I1211 
14:18:34.628276 4690 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0e4832b2b6dce8997cd247f1916ab6a197fa3f64006449b295e150efb8bd4eb7"} pod="openshift-machine-config-operator/machine-config-daemon-z9662" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 14:18:34 crc kubenswrapper[4690]: I1211 14:18:34.628484 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-z9662" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" containerName="machine-config-daemon" containerID="cri-o://0e4832b2b6dce8997cd247f1916ab6a197fa3f64006449b295e150efb8bd4eb7" gracePeriod=600 Dec 11 14:18:36 crc kubenswrapper[4690]: E1211 14:18:36.670032 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="7s" Dec 11 14:18:36 crc kubenswrapper[4690]: I1211 14:18:36.864387 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 14:18:36 crc kubenswrapper[4690]: I1211 14:18:36.865310 4690 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9a8a4902e0c6ee75ff794209a28d8f53bd597ce51f863529640262e123086e81" exitCode=-1 Dec 11 14:18:36 crc kubenswrapper[4690]: I1211 14:18:36.865331 4690 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ad35e4e1b19e2eb60eaf01f14cac102b2923d73b6554b5313f9b03341bdbe2b0" exitCode=0 Dec 11 14:18:36 crc kubenswrapper[4690]: I1211 14:18:36.865341 4690 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d35a4b3df00b428cbc8e2c97e31b53f1c712b14e5675c5dc1de3008e35d13f74" exitCode=0 Dec 11 14:18:36 crc kubenswrapper[4690]: I1211 14:18:36.865394 4690 scope.go:117] "RemoveContainer" containerID="a9615188d32950a37981ff1d2404296744222ce502221df93893d3cbdafdb9c4" Dec 11 14:18:36 crc kubenswrapper[4690]: I1211 14:18:36.866918 4690 generic.go:334] "Generic (PLEG): container finished" podID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" containerID="c7efa6125e6661f92724567c72836d33c207ea4ed35bcb7be157fa116ad5ccbb" exitCode=0 Dec 11 14:18:36 crc kubenswrapper[4690]: I1211 14:18:36.866968 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" event={"ID":"00f7dbc0-5b6f-4f74-a4e4-43759758be95","Type":"ContainerDied","Data":"c7efa6125e6661f92724567c72836d33c207ea4ed35bcb7be157fa116ad5ccbb"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.125179 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.127023 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.128213 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.128449 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.128623 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.128972 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.129451 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.130414 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.130644 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.130910 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.131495 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.131799 4690 status_manager.go:851] "Failed to get status for pod" 
podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.132201 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.132501 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.132786 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.132914 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-8bfnc" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.133041 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.133333 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.133636 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.134176 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.134552 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.135605 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.135913 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.136418 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fs97w" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.136675 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.137050 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.137325 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.137583 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.137866 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.138333 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 
11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.138535 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.138743 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.138992 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.139275 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.139522 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.139762 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.139906 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xq68s" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.139985 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.140263 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.140445 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.140678 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.140847 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.141049 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.141256 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.141493 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.141733 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.141893 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.142066 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.142244 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.142414 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.144548 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-djmxn" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.144844 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.145034 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.145550 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.145993 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.146324 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.146648 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.146943 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.147244 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.147555 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.147880 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.149470 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kmjlt" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.149890 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.150118 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.150347 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.150600 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.150924 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.151188 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.151430 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc 
kubenswrapper[4690]: I1211 14:18:37.151626 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.151880 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.152170 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.153875 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k2lrt" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.154204 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.154505 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.154913 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.155177 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.155379 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.155685 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" 
pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.155964 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.156311 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.156569 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.156781 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.158646 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-24xfd" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.159137 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.159421 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.159670 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.159963 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.160278 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.160547 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.160804 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.161049 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.161377 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.161588 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.231705 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fca9021-9a9f-4d5d-9892-37f39c580323-utilities\") pod \"9fca9021-9a9f-4d5d-9892-37f39c580323\" (UID: \"9fca9021-9a9f-4d5d-9892-37f39c580323\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.231809 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/00f7dbc0-5b6f-4f74-a4e4-43759758be95-marketplace-operator-metrics\") pod \"00f7dbc0-5b6f-4f74-a4e4-43759758be95\" (UID: \"00f7dbc0-5b6f-4f74-a4e4-43759758be95\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.231857 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j96wl\" (UniqueName: \"kubernetes.io/projected/907b18d4-d2b5-47c9-9c70-716bd64330ae-kube-api-access-j96wl\") pod \"907b18d4-d2b5-47c9-9c70-716bd64330ae\" (UID: \"907b18d4-d2b5-47c9-9c70-716bd64330ae\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.231888 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd40416-92d2-41ec-b6ae-ba668ccc5685-utilities\") pod \"8cd40416-92d2-41ec-b6ae-ba668ccc5685\" (UID: \"8cd40416-92d2-41ec-b6ae-ba668ccc5685\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.231910 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fca9021-9a9f-4d5d-9892-37f39c580323-catalog-content\") pod \"9fca9021-9a9f-4d5d-9892-37f39c580323\" (UID: \"9fca9021-9a9f-4d5d-9892-37f39c580323\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.232002 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd40416-92d2-41ec-b6ae-ba668ccc5685-catalog-content\") pod \"8cd40416-92d2-41ec-b6ae-ba668ccc5685\" (UID: \"8cd40416-92d2-41ec-b6ae-ba668ccc5685\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.232029 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/907b18d4-d2b5-47c9-9c70-716bd64330ae-catalog-content\") pod \"907b18d4-d2b5-47c9-9c70-716bd64330ae\" (UID: \"907b18d4-d2b5-47c9-9c70-716bd64330ae\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.232052 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c61312d-523c-44a8-a451-dc96bba0f6d7-catalog-content\") pod \"0c61312d-523c-44a8-a451-dc96bba0f6d7\" (UID: \"0c61312d-523c-44a8-a451-dc96bba0f6d7\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.232076 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"kube-api-access-pwgxp\" (UniqueName: \"kubernetes.io/projected/213bdee3-5fb6-4221-819e-43ec7a01f555-kube-api-access-pwgxp\") pod \"213bdee3-5fb6-4221-819e-43ec7a01f555\" (UID: \"213bdee3-5fb6-4221-819e-43ec7a01f555\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.232107 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/907b18d4-d2b5-47c9-9c70-716bd64330ae-utilities\") pod \"907b18d4-d2b5-47c9-9c70-716bd64330ae\" (UID: \"907b18d4-d2b5-47c9-9c70-716bd64330ae\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.232134 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4psmc\" (UniqueName: \"kubernetes.io/projected/00f7dbc0-5b6f-4f74-a4e4-43759758be95-kube-api-access-4psmc\") pod \"00f7dbc0-5b6f-4f74-a4e4-43759758be95\" (UID: \"00f7dbc0-5b6f-4f74-a4e4-43759758be95\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.232674 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fca9021-9a9f-4d5d-9892-37f39c580323-utilities" (OuterVolumeSpecName: "utilities") pod "9fca9021-9a9f-4d5d-9892-37f39c580323" (UID: "9fca9021-9a9f-4d5d-9892-37f39c580323"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.233063 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/907b18d4-d2b5-47c9-9c70-716bd64330ae-utilities" (OuterVolumeSpecName: "utilities") pod "907b18d4-d2b5-47c9-9c70-716bd64330ae" (UID: "907b18d4-d2b5-47c9-9c70-716bd64330ae"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.233444 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/213bdee3-5fb6-4221-819e-43ec7a01f555-utilities" (OuterVolumeSpecName: "utilities") pod "213bdee3-5fb6-4221-819e-43ec7a01f555" (UID: "213bdee3-5fb6-4221-819e-43ec7a01f555"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.233497 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cd40416-92d2-41ec-b6ae-ba668ccc5685-utilities" (OuterVolumeSpecName: "utilities") pod "8cd40416-92d2-41ec-b6ae-ba668ccc5685" (UID: "8cd40416-92d2-41ec-b6ae-ba668ccc5685"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.237310 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/907b18d4-d2b5-47c9-9c70-716bd64330ae-kube-api-access-j96wl" (OuterVolumeSpecName: "kube-api-access-j96wl") pod "907b18d4-d2b5-47c9-9c70-716bd64330ae" (UID: "907b18d4-d2b5-47c9-9c70-716bd64330ae"). InnerVolumeSpecName "kube-api-access-j96wl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.237327 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/213bdee3-5fb6-4221-819e-43ec7a01f555-kube-api-access-pwgxp" (OuterVolumeSpecName: "kube-api-access-pwgxp") pod "213bdee3-5fb6-4221-819e-43ec7a01f555" (UID: "213bdee3-5fb6-4221-819e-43ec7a01f555"). InnerVolumeSpecName "kube-api-access-pwgxp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.237484 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00f7dbc0-5b6f-4f74-a4e4-43759758be95-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "00f7dbc0-5b6f-4f74-a4e4-43759758be95" (UID: "00f7dbc0-5b6f-4f74-a4e4-43759758be95"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.237571 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00f7dbc0-5b6f-4f74-a4e4-43759758be95-kube-api-access-4psmc" (OuterVolumeSpecName: "kube-api-access-4psmc") pod "00f7dbc0-5b6f-4f74-a4e4-43759758be95" (UID: "00f7dbc0-5b6f-4f74-a4e4-43759758be95"). InnerVolumeSpecName "kube-api-access-4psmc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.243120 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/213bdee3-5fb6-4221-819e-43ec7a01f555-utilities\") pod \"213bdee3-5fb6-4221-819e-43ec7a01f555\" (UID: \"213bdee3-5fb6-4221-819e-43ec7a01f555\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.243215 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.243265 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09e8430b-2226-4933-9dcf-ee3b5de076c3-catalog-content\") pod \"09e8430b-2226-4933-9dcf-ee3b5de076c3\" (UID: \"09e8430b-2226-4933-9dcf-ee3b5de076c3\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.243304 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/213bdee3-5fb6-4221-819e-43ec7a01f555-catalog-content\") pod \"213bdee3-5fb6-4221-819e-43ec7a01f555\" (UID: \"213bdee3-5fb6-4221-819e-43ec7a01f555\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.243337 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09e8430b-2226-4933-9dcf-ee3b5de076c3-utilities\") pod \"09e8430b-2226-4933-9dcf-ee3b5de076c3\" (UID: \"09e8430b-2226-4933-9dcf-ee3b5de076c3\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.243343 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.243362 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.243389 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/646a2a29-480a-4725-9407-80d8a4f2a4bb-utilities\") pod \"646a2a29-480a-4725-9407-80d8a4f2a4bb\" (UID: \"646a2a29-480a-4725-9407-80d8a4f2a4bb\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.243430 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/646a2a29-480a-4725-9407-80d8a4f2a4bb-catalog-content\") pod \"646a2a29-480a-4725-9407-80d8a4f2a4bb\" (UID: \"646a2a29-480a-4725-9407-80d8a4f2a4bb\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.243451 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfw4x\" (UniqueName: \"kubernetes.io/projected/09e8430b-2226-4933-9dcf-ee3b5de076c3-kube-api-access-zfw4x\") pod \"09e8430b-2226-4933-9dcf-ee3b5de076c3\" (UID: \"09e8430b-2226-4933-9dcf-ee3b5de076c3\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.243480 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxxrx\" (UniqueName: \"kubernetes.io/projected/0c61312d-523c-44a8-a451-dc96bba0f6d7-kube-api-access-wxxrx\") pod \"0c61312d-523c-44a8-a451-dc96bba0f6d7\" (UID: \"0c61312d-523c-44a8-a451-dc96bba0f6d7\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.243507 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v57hq\" (UniqueName: \"kubernetes.io/projected/646a2a29-480a-4725-9407-80d8a4f2a4bb-kube-api-access-v57hq\") pod \"646a2a29-480a-4725-9407-80d8a4f2a4bb\" (UID: \"646a2a29-480a-4725-9407-80d8a4f2a4bb\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244167 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244207 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00f7dbc0-5b6f-4f74-a4e4-43759758be95-marketplace-trusted-ca\") pod \"00f7dbc0-5b6f-4f74-a4e4-43759758be95\" (UID: \"00f7dbc0-5b6f-4f74-a4e4-43759758be95\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244236 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-klnm2\" (UniqueName: \"kubernetes.io/projected/9fca9021-9a9f-4d5d-9892-37f39c580323-kube-api-access-klnm2\") pod \"9fca9021-9a9f-4d5d-9892-37f39c580323\" (UID: \"9fca9021-9a9f-4d5d-9892-37f39c580323\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244278 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-knccb\" (UniqueName: \"kubernetes.io/projected/8cd40416-92d2-41ec-b6ae-ba668ccc5685-kube-api-access-knccb\") pod 
\"8cd40416-92d2-41ec-b6ae-ba668ccc5685\" (UID: \"8cd40416-92d2-41ec-b6ae-ba668ccc5685\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244304 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c61312d-523c-44a8-a451-dc96bba0f6d7-utilities\") pod \"0c61312d-523c-44a8-a451-dc96bba0f6d7\" (UID: \"0c61312d-523c-44a8-a451-dc96bba0f6d7\") " Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244639 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/907b18d4-d2b5-47c9-9c70-716bd64330ae-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244661 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4psmc\" (UniqueName: \"kubernetes.io/projected/00f7dbc0-5b6f-4f74-a4e4-43759758be95-kube-api-access-4psmc\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244675 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/213bdee3-5fb6-4221-819e-43ec7a01f555-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244687 4690 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244699 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9fca9021-9a9f-4d5d-9892-37f39c580323-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244710 4690 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/00f7dbc0-5b6f-4f74-a4e4-43759758be95-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244723 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j96wl\" (UniqueName: \"kubernetes.io/projected/907b18d4-d2b5-47c9-9c70-716bd64330ae-kube-api-access-j96wl\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244736 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cd40416-92d2-41ec-b6ae-ba668ccc5685-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244749 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwgxp\" (UniqueName: \"kubernetes.io/projected/213bdee3-5fb6-4221-819e-43ec7a01f555-kube-api-access-pwgxp\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244711 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/646a2a29-480a-4725-9407-80d8a4f2a4bb-utilities" (OuterVolumeSpecName: "utilities") pod "646a2a29-480a-4725-9407-80d8a4f2a4bb" (UID: "646a2a29-480a-4725-9407-80d8a4f2a4bb"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244749 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09e8430b-2226-4933-9dcf-ee3b5de076c3-utilities" (OuterVolumeSpecName: "utilities") pod "09e8430b-2226-4933-9dcf-ee3b5de076c3" (UID: "09e8430b-2226-4933-9dcf-ee3b5de076c3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244744 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.244766 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.245462 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00f7dbc0-5b6f-4f74-a4e4-43759758be95-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "00f7dbc0-5b6f-4f74-a4e4-43759758be95" (UID: "00f7dbc0-5b6f-4f74-a4e4-43759758be95"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.246732 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c61312d-523c-44a8-a451-dc96bba0f6d7-utilities" (OuterVolumeSpecName: "utilities") pod "0c61312d-523c-44a8-a451-dc96bba0f6d7" (UID: "0c61312d-523c-44a8-a451-dc96bba0f6d7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.248345 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9fca9021-9a9f-4d5d-9892-37f39c580323-kube-api-access-klnm2" (OuterVolumeSpecName: "kube-api-access-klnm2") pod "9fca9021-9a9f-4d5d-9892-37f39c580323" (UID: "9fca9021-9a9f-4d5d-9892-37f39c580323"). InnerVolumeSpecName "kube-api-access-klnm2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.248842 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09e8430b-2226-4933-9dcf-ee3b5de076c3-kube-api-access-zfw4x" (OuterVolumeSpecName: "kube-api-access-zfw4x") pod "09e8430b-2226-4933-9dcf-ee3b5de076c3" (UID: "09e8430b-2226-4933-9dcf-ee3b5de076c3"). InnerVolumeSpecName "kube-api-access-zfw4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.249013 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c61312d-523c-44a8-a451-dc96bba0f6d7-kube-api-access-wxxrx" (OuterVolumeSpecName: "kube-api-access-wxxrx") pod "0c61312d-523c-44a8-a451-dc96bba0f6d7" (UID: "0c61312d-523c-44a8-a451-dc96bba0f6d7"). InnerVolumeSpecName "kube-api-access-wxxrx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.249350 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/646a2a29-480a-4725-9407-80d8a4f2a4bb-kube-api-access-v57hq" (OuterVolumeSpecName: "kube-api-access-v57hq") pod "646a2a29-480a-4725-9407-80d8a4f2a4bb" (UID: "646a2a29-480a-4725-9407-80d8a4f2a4bb"). InnerVolumeSpecName "kube-api-access-v57hq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.250246 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cd40416-92d2-41ec-b6ae-ba668ccc5685-kube-api-access-knccb" (OuterVolumeSpecName: "kube-api-access-knccb") pod "8cd40416-92d2-41ec-b6ae-ba668ccc5685" (UID: "8cd40416-92d2-41ec-b6ae-ba668ccc5685"). InnerVolumeSpecName "kube-api-access-knccb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.345924 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/09e8430b-2226-4933-9dcf-ee3b5de076c3-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.346004 4690 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.346014 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/646a2a29-480a-4725-9407-80d8a4f2a4bb-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.346024 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfw4x\" (UniqueName: \"kubernetes.io/projected/09e8430b-2226-4933-9dcf-ee3b5de076c3-kube-api-access-zfw4x\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.346038 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxxrx\" (UniqueName: \"kubernetes.io/projected/0c61312d-523c-44a8-a451-dc96bba0f6d7-kube-api-access-wxxrx\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.346048 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v57hq\" (UniqueName: \"kubernetes.io/projected/646a2a29-480a-4725-9407-80d8a4f2a4bb-kube-api-access-v57hq\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.346056 4690 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.346067 4690 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/00f7dbc0-5b6f-4f74-a4e4-43759758be95-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.346077 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-klnm2\" (UniqueName: \"kubernetes.io/projected/9fca9021-9a9f-4d5d-9892-37f39c580323-kube-api-access-klnm2\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.346087 4690 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-knccb\" (UniqueName: \"kubernetes.io/projected/8cd40416-92d2-41ec-b6ae-ba668ccc5685-kube-api-access-knccb\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.346096 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c61312d-523c-44a8-a451-dc96bba0f6d7-utilities\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.516926 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/213bdee3-5fb6-4221-819e-43ec7a01f555-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "213bdee3-5fb6-4221-819e-43ec7a01f555" (UID: "213bdee3-5fb6-4221-819e-43ec7a01f555"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.544727 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9fca9021-9a9f-4d5d-9892-37f39c580323-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9fca9021-9a9f-4d5d-9892-37f39c580323" (UID: "9fca9021-9a9f-4d5d-9892-37f39c580323"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.549089 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9fca9021-9a9f-4d5d-9892-37f39c580323-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.549127 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/213bdee3-5fb6-4221-819e-43ec7a01f555-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.562512 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/907b18d4-d2b5-47c9-9c70-716bd64330ae-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "907b18d4-d2b5-47c9-9c70-716bd64330ae" (UID: "907b18d4-d2b5-47c9-9c70-716bd64330ae"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.562588 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cd40416-92d2-41ec-b6ae-ba668ccc5685-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8cd40416-92d2-41ec-b6ae-ba668ccc5685" (UID: "8cd40416-92d2-41ec-b6ae-ba668ccc5685"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.574090 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/646a2a29-480a-4725-9407-80d8a4f2a4bb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "646a2a29-480a-4725-9407-80d8a4f2a4bb" (UID: "646a2a29-480a-4725-9407-80d8a4f2a4bb"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.650370 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/646a2a29-480a-4725-9407-80d8a4f2a4bb-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.650654 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cd40416-92d2-41ec-b6ae-ba668ccc5685-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.650779 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/907b18d4-d2b5-47c9-9c70-716bd64330ae-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: E1211 14:18:37.721759 4690 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.51:6443: connect: connection refused" event=< Dec 11 14:18:37 crc kubenswrapper[4690]: &Event{ObjectMeta:{marketplace-operator-79b997595-7bxfp.18802ef9bbfd3991 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:marketplace-operator-79b997595-7bxfp,UID:203500e9-a305-42cd-9909-d64ca944d363,APIVersion:v1,ResourceVersion:29396,FieldPath:,},Reason:FailedCreatePodSandBox,Message:Failed to create pod sandbox: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-7bxfp_openshift-marketplace_203500e9-a305-42cd-9909-d64ca944d363_0(929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13): error adding pod openshift-marketplace_marketplace-operator-79b997595-7bxfp to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13" Netns:"/var/run/netns/e27a2426-a164-4a42-9b0f-8d41a5f46787" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-7bxfp;K8S_POD_INFRA_CONTAINER_ID=929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13;K8S_POD_UID=203500e9-a305-42cd-9909-d64ca944d363" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-7bxfp] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-7bxfp/203500e9-a305-42cd-9909-d64ca944d363]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-7bxfp?timeout=1m0s": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:18:37 crc kubenswrapper[4690]: ': StdinData: 
{"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"},Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 14:18:24.064903569 +0000 UTC m=+235.680305212,LastTimestamp:2025-12-11 14:18:24.064903569 +0000 UTC m=+235.680305212,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Dec 11 14:18:37 crc kubenswrapper[4690]: > Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.744898 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09e8430b-2226-4933-9dcf-ee3b5de076c3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "09e8430b-2226-4933-9dcf-ee3b5de076c3" (UID: "09e8430b-2226-4933-9dcf-ee3b5de076c3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.752306 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/09e8430b-2226-4933-9dcf-ee3b5de076c3-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.874751 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-r7754" event={"ID":"ffb685f2-c2fc-4602-8d81-5f11b6581f29","Type":"ContainerDied","Data":"6e2823f11348fb19559feae969ea88b6c99bf154f1886c272b782576c545f7f6"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.874839 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-r7754" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.874858 4690 scope.go:117] "RemoveContainer" containerID="2ed5573f6b8998d3ae7701624250c0691f7a3c7f87a61806fbca3a7ee16328f7" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.876197 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.876632 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.877075 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.877259 4690 generic.go:334] "Generic (PLEG): container finished" podID="646a2a29-480a-4725-9407-80d8a4f2a4bb" containerID="cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503" exitCode=0 Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.877303 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.877338 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bfnc" event={"ID":"646a2a29-480a-4725-9407-80d8a4f2a4bb","Type":"ContainerDied","Data":"cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.877626 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-8bfnc" event={"ID":"646a2a29-480a-4725-9407-80d8a4f2a4bb","Type":"ContainerDied","Data":"5c48c22f578c7e27e1350aea0956820eda8ad6c16458898c8c74ccb8b7d8864e"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.877393 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-8bfnc" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.877640 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.877898 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.878146 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.878518 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.878846 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.879243 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.879609 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.879883 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.880161 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.880356 4690 generic.go:334] "Generic (PLEG): container finished" podID="44a7b31b-09dd-452b-87ba-29764eaa0206" containerID="0e4832b2b6dce8997cd247f1916ab6a197fa3f64006449b295e150efb8bd4eb7" exitCode=0 Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.880416 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z9662" event={"ID":"44a7b31b-09dd-452b-87ba-29764eaa0206","Type":"ContainerDied","Data":"0e4832b2b6dce8997cd247f1916ab6a197fa3f64006449b295e150efb8bd4eb7"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.880578 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.881034 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.881268 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.881555 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.881860 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.882040 4690 generic.go:334] "Generic (PLEG): container finished" podID="64df140d-3126-42f9-b4d2-a3488a27fb57" containerID="22af00ca16bc55b53d6dc08326b573905c5ccf753f5dcae674503f0d098e100e" exitCode=0 Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.882101 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"64df140d-3126-42f9-b4d2-a3488a27fb57","Type":"ContainerDied","Data":"22af00ca16bc55b53d6dc08326b573905c5ccf753f5dcae674503f0d098e100e"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.882259 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.882629 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.882970 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.883199 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.883400 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.883634 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.883848 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.884053 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.884152 4690 generic.go:334] "Generic (PLEG): container finished" podID="213bdee3-5fb6-4221-819e-43ec7a01f555" containerID="b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b" exitCode=0 Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.884207 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k2lrt" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.884226 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k2lrt" event={"ID":"213bdee3-5fb6-4221-819e-43ec7a01f555","Type":"ContainerDied","Data":"b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.884236 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.884258 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k2lrt" event={"ID":"213bdee3-5fb6-4221-819e-43ec7a01f555","Type":"ContainerDied","Data":"1dc9f244424a65e43dd272abaa9baeda104378db3b395aa15a88ed467af2e316"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.884503 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.884751 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.885017 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.885275 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.885784 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.886172 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.886343 4690 generic.go:334] 
"Generic (PLEG): container finished" podID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" containerID="b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806" exitCode=0 Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.886593 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fs97w" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.886599 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fs97w" event={"ID":"8cd40416-92d2-41ec-b6ae-ba668ccc5685","Type":"ContainerDied","Data":"b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.886898 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.886938 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fs97w" event={"ID":"8cd40416-92d2-41ec-b6ae-ba668ccc5685","Type":"ContainerDied","Data":"a834198ed9baf9d6ce8a33206ca0e602171592f634be7c55342b91fc3484b89b"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.887346 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.887593 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.887816 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.888091 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.888326 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.888526 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" 
pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.888870 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.889133 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.889224 4690 scope.go:117] "RemoveContainer" containerID="62a8b1947c7d948f2fecbea6f9f5706f0fb7a775357e818dde42e687b87369d1" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.889467 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.889520 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.889465 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" event={"ID":"00f7dbc0-5b6f-4f74-a4e4-43759758be95","Type":"ContainerDied","Data":"07023e0f01fd7be1116f8f89dcf4d75ea50b93c4a5b45d7fec66cbf5d38a755a"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.889751 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.889947 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.890181 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.890496 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.890984 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.891315 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.891441 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"cecb33951fbd0bfbe17accfbc31acffae97524780d80b25f53c162d2c45ab45b"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.891568 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.891815 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.892044 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.892255 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.892571 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.893645 4690 generic.go:334] "Generic (PLEG): container finished" podID="0c61312d-523c-44a8-a451-dc96bba0f6d7" containerID="614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4" exitCode=0 Dec 11 14:18:37 crc 
kubenswrapper[4690]: I1211 14:18:37.893679 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-24xfd" event={"ID":"0c61312d-523c-44a8-a451-dc96bba0f6d7","Type":"ContainerDied","Data":"614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.893702 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-24xfd" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.893712 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-24xfd" event={"ID":"0c61312d-523c-44a8-a451-dc96bba0f6d7","Type":"ContainerDied","Data":"8055ffde4bebb8658d3e65f630f5be61cfc071fd4175df8cb1589c8d290bc0e7"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.896736 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.897419 4690 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9db92bcfe7099e9a716af1be228bcdb626b2b2383c72901ebbd9ea1c1aceb5a0" exitCode=0 Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.897637 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.899776 4690 generic.go:334] "Generic (PLEG): container finished" podID="09e8430b-2226-4933-9dcf-ee3b5de076c3" containerID="f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4" exitCode=0 Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.899810 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xq68s" event={"ID":"09e8430b-2226-4933-9dcf-ee3b5de076c3","Type":"ContainerDied","Data":"f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.899846 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xq68s" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.899856 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xq68s" event={"ID":"09e8430b-2226-4933-9dcf-ee3b5de076c3","Type":"ContainerDied","Data":"6c6e5b10ab20157e83281f40f49ca885447f6de8380bdae723a926c7815273e8"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.902585 4690 generic.go:334] "Generic (PLEG): container finished" podID="907b18d4-d2b5-47c9-9c70-716bd64330ae" containerID="ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1" exitCode=0 Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.902659 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kmjlt" event={"ID":"907b18d4-d2b5-47c9-9c70-716bd64330ae","Type":"ContainerDied","Data":"ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.902690 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kmjlt" event={"ID":"907b18d4-d2b5-47c9-9c70-716bd64330ae","Type":"ContainerDied","Data":"9630c8bab4f380ae02382d3f978e6a624f0ce681a6207be9eadedb4f64916ce7"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.902806 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kmjlt" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.903995 4690 scope.go:117] "RemoveContainer" containerID="0d89986ecb6c5c063b8d37076880275ca2bdc91f593e17ac6848ea3da47e2b45" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.905811 4690 generic.go:334] "Generic (PLEG): container finished" podID="9fca9021-9a9f-4d5d-9892-37f39c580323" containerID="93f0ae52a70f9754902b1f3328ece5d657de29abec61d07297a34f8806e1f065" exitCode=0 Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.905862 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djmxn" event={"ID":"9fca9021-9a9f-4d5d-9892-37f39c580323","Type":"ContainerDied","Data":"93f0ae52a70f9754902b1f3328ece5d657de29abec61d07297a34f8806e1f065"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.905889 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"071dcc6b5227134aa7edd407cd17ec42007d5b85e0e5a79d82c2999eb083c682"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.905903 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cf7e1f734f99c12908234fbcb5581753a863e60d4b305ad4599bf66401789e60"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.905914 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-djmxn" event={"ID":"9fca9021-9a9f-4d5d-9892-37f39c580323","Type":"ContainerDied","Data":"bf7869d3bb518156bdb2191434241ee80efca3e2a95f832cc697029db8462933"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.905920 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"93f0ae52a70f9754902b1f3328ece5d657de29abec61d07297a34f8806e1f065"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.905928 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"071dcc6b5227134aa7edd407cd17ec42007d5b85e0e5a79d82c2999eb083c682"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.905933 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cf7e1f734f99c12908234fbcb5581753a863e60d4b305ad4599bf66401789e60"} Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.906007 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-djmxn" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.908790 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.928181 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.933029 4690 scope.go:117] "RemoveContainer" containerID="cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.950461 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.955594 4690 scope.go:117] "RemoveContainer" containerID="ba6527c8cd588d8d6d94596f287bcb155c65b626007d0dde03a7f497db1bffc6" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.968554 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.988936 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:37 crc kubenswrapper[4690]: I1211 14:18:37.994303 4690 scope.go:117] "RemoveContainer" containerID="fe51b94909bb6b47451400c94b18c317719c35854a48dc2f0b09bc571e58380c" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.009139 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.009274 4690 scope.go:117] "RemoveContainer" 
containerID="cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.009855 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503\": container with ID starting with cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503 not found: ID does not exist" containerID="cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.009908 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503"} err="failed to get container status \"cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503\": rpc error: code = NotFound desc = could not find container \"cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503\": container with ID starting with cf1b98866cf16ad569060775ea7f942c8c7dc2ca9c2ca55148cb3ddba0aef503 not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.009946 4690 scope.go:117] "RemoveContainer" containerID="ba6527c8cd588d8d6d94596f287bcb155c65b626007d0dde03a7f497db1bffc6" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.010417 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba6527c8cd588d8d6d94596f287bcb155c65b626007d0dde03a7f497db1bffc6\": container with ID starting with ba6527c8cd588d8d6d94596f287bcb155c65b626007d0dde03a7f497db1bffc6 not found: ID does not exist" containerID="ba6527c8cd588d8d6d94596f287bcb155c65b626007d0dde03a7f497db1bffc6" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.010454 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba6527c8cd588d8d6d94596f287bcb155c65b626007d0dde03a7f497db1bffc6"} err="failed to get container status \"ba6527c8cd588d8d6d94596f287bcb155c65b626007d0dde03a7f497db1bffc6\": rpc error: code = NotFound desc = could not find container \"ba6527c8cd588d8d6d94596f287bcb155c65b626007d0dde03a7f497db1bffc6\": container with ID starting with ba6527c8cd588d8d6d94596f287bcb155c65b626007d0dde03a7f497db1bffc6 not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.010476 4690 scope.go:117] "RemoveContainer" containerID="fe51b94909bb6b47451400c94b18c317719c35854a48dc2f0b09bc571e58380c" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.010915 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe51b94909bb6b47451400c94b18c317719c35854a48dc2f0b09bc571e58380c\": container with ID starting with fe51b94909bb6b47451400c94b18c317719c35854a48dc2f0b09bc571e58380c not found: ID does not exist" containerID="fe51b94909bb6b47451400c94b18c317719c35854a48dc2f0b09bc571e58380c" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.011003 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe51b94909bb6b47451400c94b18c317719c35854a48dc2f0b09bc571e58380c"} err="failed to get container status \"fe51b94909bb6b47451400c94b18c317719c35854a48dc2f0b09bc571e58380c\": rpc error: code = NotFound desc = could not find container \"fe51b94909bb6b47451400c94b18c317719c35854a48dc2f0b09bc571e58380c\": container with ID starting with 
fe51b94909bb6b47451400c94b18c317719c35854a48dc2f0b09bc571e58380c not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.011036 4690 scope.go:117] "RemoveContainer" containerID="b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.027666 4690 scope.go:117] "RemoveContainer" containerID="305761aace6f2c4ae9bb6a34588514d1e6fe31eb6c35a1cb270e9f11c09825b8" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.028229 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.046789 4690 scope.go:117] "RemoveContainer" containerID="963dde45b3cebc74b05ae4fa9f4e403600095dc5361ef8a089a3c567795d4ccf" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.048232 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.062326 4690 scope.go:117] "RemoveContainer" containerID="b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.062811 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b\": container with ID starting with b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b not found: ID does not exist" containerID="b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.062846 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b"} err="failed to get container status \"b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b\": rpc error: code = NotFound desc = could not find container \"b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b\": container with ID starting with b4291a04f112d43f306a3e8579adbbf3f4c9d9c46945b20ea8439be31149123b not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.062875 4690 scope.go:117] "RemoveContainer" containerID="305761aace6f2c4ae9bb6a34588514d1e6fe31eb6c35a1cb270e9f11c09825b8" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.063366 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"305761aace6f2c4ae9bb6a34588514d1e6fe31eb6c35a1cb270e9f11c09825b8\": container with ID starting with 305761aace6f2c4ae9bb6a34588514d1e6fe31eb6c35a1cb270e9f11c09825b8 not found: ID does not exist" containerID="305761aace6f2c4ae9bb6a34588514d1e6fe31eb6c35a1cb270e9f11c09825b8" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.063400 4690 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"305761aace6f2c4ae9bb6a34588514d1e6fe31eb6c35a1cb270e9f11c09825b8"} err="failed to get container status \"305761aace6f2c4ae9bb6a34588514d1e6fe31eb6c35a1cb270e9f11c09825b8\": rpc error: code = NotFound desc = could not find container \"305761aace6f2c4ae9bb6a34588514d1e6fe31eb6c35a1cb270e9f11c09825b8\": container with ID starting with 305761aace6f2c4ae9bb6a34588514d1e6fe31eb6c35a1cb270e9f11c09825b8 not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.063422 4690 scope.go:117] "RemoveContainer" containerID="963dde45b3cebc74b05ae4fa9f4e403600095dc5361ef8a089a3c567795d4ccf" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.063886 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"963dde45b3cebc74b05ae4fa9f4e403600095dc5361ef8a089a3c567795d4ccf\": container with ID starting with 963dde45b3cebc74b05ae4fa9f4e403600095dc5361ef8a089a3c567795d4ccf not found: ID does not exist" containerID="963dde45b3cebc74b05ae4fa9f4e403600095dc5361ef8a089a3c567795d4ccf" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.063915 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"963dde45b3cebc74b05ae4fa9f4e403600095dc5361ef8a089a3c567795d4ccf"} err="failed to get container status \"963dde45b3cebc74b05ae4fa9f4e403600095dc5361ef8a089a3c567795d4ccf\": rpc error: code = NotFound desc = could not find container \"963dde45b3cebc74b05ae4fa9f4e403600095dc5361ef8a089a3c567795d4ccf\": container with ID starting with 963dde45b3cebc74b05ae4fa9f4e403600095dc5361ef8a089a3c567795d4ccf not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.063935 4690 scope.go:117] "RemoveContainer" containerID="b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.068177 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.089761 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.094930 4690 scope.go:117] "RemoveContainer" containerID="4f017cb839d1c26b09b8a30b89ce3fe0695de7014356a1205e85377e27fb6986" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.108842 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.109978 4690 scope.go:117] "RemoveContainer" containerID="7d1779c6af101abec489772c87b0d83f77b012c22810a65719fce0232af65a3d" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.125231 4690 scope.go:117] "RemoveContainer" 
containerID="b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.125782 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806\": container with ID starting with b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806 not found: ID does not exist" containerID="b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.125849 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806"} err="failed to get container status \"b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806\": rpc error: code = NotFound desc = could not find container \"b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806\": container with ID starting with b4281341bbfe26d7e1a1101547c2c2cf3f249da876adabeda8c39573d0a31806 not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.125882 4690 scope.go:117] "RemoveContainer" containerID="4f017cb839d1c26b09b8a30b89ce3fe0695de7014356a1205e85377e27fb6986" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.126402 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f017cb839d1c26b09b8a30b89ce3fe0695de7014356a1205e85377e27fb6986\": container with ID starting with 4f017cb839d1c26b09b8a30b89ce3fe0695de7014356a1205e85377e27fb6986 not found: ID does not exist" containerID="4f017cb839d1c26b09b8a30b89ce3fe0695de7014356a1205e85377e27fb6986" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.126473 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f017cb839d1c26b09b8a30b89ce3fe0695de7014356a1205e85377e27fb6986"} err="failed to get container status \"4f017cb839d1c26b09b8a30b89ce3fe0695de7014356a1205e85377e27fb6986\": rpc error: code = NotFound desc = could not find container \"4f017cb839d1c26b09b8a30b89ce3fe0695de7014356a1205e85377e27fb6986\": container with ID starting with 4f017cb839d1c26b09b8a30b89ce3fe0695de7014356a1205e85377e27fb6986 not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.126521 4690 scope.go:117] "RemoveContainer" containerID="7d1779c6af101abec489772c87b0d83f77b012c22810a65719fce0232af65a3d" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.127201 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d1779c6af101abec489772c87b0d83f77b012c22810a65719fce0232af65a3d\": container with ID starting with 7d1779c6af101abec489772c87b0d83f77b012c22810a65719fce0232af65a3d not found: ID does not exist" containerID="7d1779c6af101abec489772c87b0d83f77b012c22810a65719fce0232af65a3d" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.127329 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d1779c6af101abec489772c87b0d83f77b012c22810a65719fce0232af65a3d"} err="failed to get container status \"7d1779c6af101abec489772c87b0d83f77b012c22810a65719fce0232af65a3d\": rpc error: code = NotFound desc = could not find container \"7d1779c6af101abec489772c87b0d83f77b012c22810a65719fce0232af65a3d\": container with ID starting with 
7d1779c6af101abec489772c87b0d83f77b012c22810a65719fce0232af65a3d not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.127423 4690 scope.go:117] "RemoveContainer" containerID="c7efa6125e6661f92724567c72836d33c207ea4ed35bcb7be157fa116ad5ccbb" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.128255 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.148094 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.168257 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.188034 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.188907 4690 scope.go:117] "RemoveContainer" containerID="614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.205294 4690 scope.go:117] "RemoveContainer" containerID="f9b8770287cb42349191b0d3035d30c84198fead1ff6129dbe148e5c34e471e5" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.208364 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.227776 4690 scope.go:117] "RemoveContainer" containerID="e7592b3536fe97a6b7c1e71f26da94a6920897b250944f2116074ce5f99f9db7" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.228213 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.243243 4690 scope.go:117] "RemoveContainer" containerID="614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.243740 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4\": container with ID starting with 614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4 not found: ID does not exist" containerID="614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.243775 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4"} err="failed to get container status \"614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4\": rpc error: code = NotFound desc = could not find container \"614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4\": container with ID starting with 614b183d102a051e72e4a27682acbec1c8f7884b0b34ca754db3d0bd442566f4 not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.243802 4690 scope.go:117] "RemoveContainer" containerID="f9b8770287cb42349191b0d3035d30c84198fead1ff6129dbe148e5c34e471e5" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.244399 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9b8770287cb42349191b0d3035d30c84198fead1ff6129dbe148e5c34e471e5\": container with ID starting with f9b8770287cb42349191b0d3035d30c84198fead1ff6129dbe148e5c34e471e5 not found: ID does not exist" containerID="f9b8770287cb42349191b0d3035d30c84198fead1ff6129dbe148e5c34e471e5" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.244478 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9b8770287cb42349191b0d3035d30c84198fead1ff6129dbe148e5c34e471e5"} err="failed to get container status \"f9b8770287cb42349191b0d3035d30c84198fead1ff6129dbe148e5c34e471e5\": rpc error: code = NotFound desc = could not find container \"f9b8770287cb42349191b0d3035d30c84198fead1ff6129dbe148e5c34e471e5\": container with ID starting with f9b8770287cb42349191b0d3035d30c84198fead1ff6129dbe148e5c34e471e5 not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.244525 4690 scope.go:117] "RemoveContainer" containerID="e7592b3536fe97a6b7c1e71f26da94a6920897b250944f2116074ce5f99f9db7" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.245097 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7592b3536fe97a6b7c1e71f26da94a6920897b250944f2116074ce5f99f9db7\": container with ID starting with e7592b3536fe97a6b7c1e71f26da94a6920897b250944f2116074ce5f99f9db7 not found: ID does not exist" containerID="e7592b3536fe97a6b7c1e71f26da94a6920897b250944f2116074ce5f99f9db7" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.245121 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7592b3536fe97a6b7c1e71f26da94a6920897b250944f2116074ce5f99f9db7"} err="failed to get container status \"e7592b3536fe97a6b7c1e71f26da94a6920897b250944f2116074ce5f99f9db7\": rpc error: code = NotFound desc = could not find container \"e7592b3536fe97a6b7c1e71f26da94a6920897b250944f2116074ce5f99f9db7\": container with ID starting with e7592b3536fe97a6b7c1e71f26da94a6920897b250944f2116074ce5f99f9db7 not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.245140 4690 scope.go:117] "RemoveContainer" containerID="9a8a4902e0c6ee75ff794209a28d8f53bd597ce51f863529640262e123086e81" Dec 11 14:18:38 crc 
kubenswrapper[4690]: I1211 14:18:38.248786 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.269051 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.289014 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.308451 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.328381 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.346125 4690 scope.go:117] "RemoveContainer" containerID="ad35e4e1b19e2eb60eaf01f14cac102b2923d73b6554b5313f9b03341bdbe2b0" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.348178 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.363783 4690 scope.go:117] "RemoveContainer" containerID="d35a4b3df00b428cbc8e2c97e31b53f1c712b14e5675c5dc1de3008e35d13f74" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.380195 4690 scope.go:117] "RemoveContainer" containerID="48702dab61bba8195ae012efbdf07816dddea37d296c326397f4fcbdfe766e3e" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.396818 4690 scope.go:117] "RemoveContainer" containerID="9db92bcfe7099e9a716af1be228bcdb626b2b2383c72901ebbd9ea1c1aceb5a0" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.415143 4690 scope.go:117] "RemoveContainer" containerID="a0d55fe16fad52ea58196b3e2c6f392cf5390b0ab0ec231dba2bedae56faac19" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.435242 4690 scope.go:117] "RemoveContainer" containerID="9a8a4902e0c6ee75ff794209a28d8f53bd597ce51f863529640262e123086e81" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.436319 4690 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9a8a4902e0c6ee75ff794209a28d8f53bd597ce51f863529640262e123086e81\": container with ID starting with 9a8a4902e0c6ee75ff794209a28d8f53bd597ce51f863529640262e123086e81 not found: ID does not exist" containerID="9a8a4902e0c6ee75ff794209a28d8f53bd597ce51f863529640262e123086e81" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.436381 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a8a4902e0c6ee75ff794209a28d8f53bd597ce51f863529640262e123086e81"} err="failed to get container status \"9a8a4902e0c6ee75ff794209a28d8f53bd597ce51f863529640262e123086e81\": rpc error: code = NotFound desc = could not find container \"9a8a4902e0c6ee75ff794209a28d8f53bd597ce51f863529640262e123086e81\": container with ID starting with 9a8a4902e0c6ee75ff794209a28d8f53bd597ce51f863529640262e123086e81 not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.436436 4690 scope.go:117] "RemoveContainer" containerID="ad35e4e1b19e2eb60eaf01f14cac102b2923d73b6554b5313f9b03341bdbe2b0" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.437012 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad35e4e1b19e2eb60eaf01f14cac102b2923d73b6554b5313f9b03341bdbe2b0\": container with ID starting with ad35e4e1b19e2eb60eaf01f14cac102b2923d73b6554b5313f9b03341bdbe2b0 not found: ID does not exist" containerID="ad35e4e1b19e2eb60eaf01f14cac102b2923d73b6554b5313f9b03341bdbe2b0" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.437036 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad35e4e1b19e2eb60eaf01f14cac102b2923d73b6554b5313f9b03341bdbe2b0"} err="failed to get container status \"ad35e4e1b19e2eb60eaf01f14cac102b2923d73b6554b5313f9b03341bdbe2b0\": rpc error: code = NotFound desc = could not find container \"ad35e4e1b19e2eb60eaf01f14cac102b2923d73b6554b5313f9b03341bdbe2b0\": container with ID starting with ad35e4e1b19e2eb60eaf01f14cac102b2923d73b6554b5313f9b03341bdbe2b0 not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.437052 4690 scope.go:117] "RemoveContainer" containerID="d35a4b3df00b428cbc8e2c97e31b53f1c712b14e5675c5dc1de3008e35d13f74" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.437320 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d35a4b3df00b428cbc8e2c97e31b53f1c712b14e5675c5dc1de3008e35d13f74\": container with ID starting with d35a4b3df00b428cbc8e2c97e31b53f1c712b14e5675c5dc1de3008e35d13f74 not found: ID does not exist" containerID="d35a4b3df00b428cbc8e2c97e31b53f1c712b14e5675c5dc1de3008e35d13f74" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.437337 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d35a4b3df00b428cbc8e2c97e31b53f1c712b14e5675c5dc1de3008e35d13f74"} err="failed to get container status \"d35a4b3df00b428cbc8e2c97e31b53f1c712b14e5675c5dc1de3008e35d13f74\": rpc error: code = NotFound desc = could not find container \"d35a4b3df00b428cbc8e2c97e31b53f1c712b14e5675c5dc1de3008e35d13f74\": container with ID starting with d35a4b3df00b428cbc8e2c97e31b53f1c712b14e5675c5dc1de3008e35d13f74 not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.437348 4690 scope.go:117] "RemoveContainer" 
containerID="48702dab61bba8195ae012efbdf07816dddea37d296c326397f4fcbdfe766e3e" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.437624 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"48702dab61bba8195ae012efbdf07816dddea37d296c326397f4fcbdfe766e3e\": container with ID starting with 48702dab61bba8195ae012efbdf07816dddea37d296c326397f4fcbdfe766e3e not found: ID does not exist" containerID="48702dab61bba8195ae012efbdf07816dddea37d296c326397f4fcbdfe766e3e" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.437647 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"48702dab61bba8195ae012efbdf07816dddea37d296c326397f4fcbdfe766e3e"} err="failed to get container status \"48702dab61bba8195ae012efbdf07816dddea37d296c326397f4fcbdfe766e3e\": rpc error: code = NotFound desc = could not find container \"48702dab61bba8195ae012efbdf07816dddea37d296c326397f4fcbdfe766e3e\": container with ID starting with 48702dab61bba8195ae012efbdf07816dddea37d296c326397f4fcbdfe766e3e not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.437660 4690 scope.go:117] "RemoveContainer" containerID="9db92bcfe7099e9a716af1be228bcdb626b2b2383c72901ebbd9ea1c1aceb5a0" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.437973 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9db92bcfe7099e9a716af1be228bcdb626b2b2383c72901ebbd9ea1c1aceb5a0\": container with ID starting with 9db92bcfe7099e9a716af1be228bcdb626b2b2383c72901ebbd9ea1c1aceb5a0 not found: ID does not exist" containerID="9db92bcfe7099e9a716af1be228bcdb626b2b2383c72901ebbd9ea1c1aceb5a0" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.437996 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9db92bcfe7099e9a716af1be228bcdb626b2b2383c72901ebbd9ea1c1aceb5a0"} err="failed to get container status \"9db92bcfe7099e9a716af1be228bcdb626b2b2383c72901ebbd9ea1c1aceb5a0\": rpc error: code = NotFound desc = could not find container \"9db92bcfe7099e9a716af1be228bcdb626b2b2383c72901ebbd9ea1c1aceb5a0\": container with ID starting with 9db92bcfe7099e9a716af1be228bcdb626b2b2383c72901ebbd9ea1c1aceb5a0 not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.438010 4690 scope.go:117] "RemoveContainer" containerID="a0d55fe16fad52ea58196b3e2c6f392cf5390b0ab0ec231dba2bedae56faac19" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.438232 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0d55fe16fad52ea58196b3e2c6f392cf5390b0ab0ec231dba2bedae56faac19\": container with ID starting with a0d55fe16fad52ea58196b3e2c6f392cf5390b0ab0ec231dba2bedae56faac19 not found: ID does not exist" containerID="a0d55fe16fad52ea58196b3e2c6f392cf5390b0ab0ec231dba2bedae56faac19" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.438735 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0d55fe16fad52ea58196b3e2c6f392cf5390b0ab0ec231dba2bedae56faac19"} err="failed to get container status \"a0d55fe16fad52ea58196b3e2c6f392cf5390b0ab0ec231dba2bedae56faac19\": rpc error: code = NotFound desc = could not find container \"a0d55fe16fad52ea58196b3e2c6f392cf5390b0ab0ec231dba2bedae56faac19\": container with ID starting with 
a0d55fe16fad52ea58196b3e2c6f392cf5390b0ab0ec231dba2bedae56faac19 not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.438783 4690 scope.go:117] "RemoveContainer" containerID="f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.452473 4690 scope.go:117] "RemoveContainer" containerID="c706f42716c86d5638dc57c2356aaf2dcab933dfe85672bc84d377553a86dede" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.470257 4690 scope.go:117] "RemoveContainer" containerID="4ac0da69a5417465967ea7f81d4b6dd8688fc1eb132a26c3a9eff3218ddbb03d" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.486946 4690 scope.go:117] "RemoveContainer" containerID="f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.487570 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4\": container with ID starting with f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4 not found: ID does not exist" containerID="f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.487611 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4"} err="failed to get container status \"f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4\": rpc error: code = NotFound desc = could not find container \"f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4\": container with ID starting with f11fc3b7481a7a453f4bb262a76bc39ab0e0eeda3de4e40e2a00a9ff0cefc2c4 not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.487659 4690 scope.go:117] "RemoveContainer" containerID="c706f42716c86d5638dc57c2356aaf2dcab933dfe85672bc84d377553a86dede" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.488344 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c706f42716c86d5638dc57c2356aaf2dcab933dfe85672bc84d377553a86dede\": container with ID starting with c706f42716c86d5638dc57c2356aaf2dcab933dfe85672bc84d377553a86dede not found: ID does not exist" containerID="c706f42716c86d5638dc57c2356aaf2dcab933dfe85672bc84d377553a86dede" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.488420 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c706f42716c86d5638dc57c2356aaf2dcab933dfe85672bc84d377553a86dede"} err="failed to get container status \"c706f42716c86d5638dc57c2356aaf2dcab933dfe85672bc84d377553a86dede\": rpc error: code = NotFound desc = could not find container \"c706f42716c86d5638dc57c2356aaf2dcab933dfe85672bc84d377553a86dede\": container with ID starting with c706f42716c86d5638dc57c2356aaf2dcab933dfe85672bc84d377553a86dede not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.488467 4690 scope.go:117] "RemoveContainer" containerID="4ac0da69a5417465967ea7f81d4b6dd8688fc1eb132a26c3a9eff3218ddbb03d" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.488940 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4ac0da69a5417465967ea7f81d4b6dd8688fc1eb132a26c3a9eff3218ddbb03d\": container 
with ID starting with 4ac0da69a5417465967ea7f81d4b6dd8688fc1eb132a26c3a9eff3218ddbb03d not found: ID does not exist" containerID="4ac0da69a5417465967ea7f81d4b6dd8688fc1eb132a26c3a9eff3218ddbb03d" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.488985 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ac0da69a5417465967ea7f81d4b6dd8688fc1eb132a26c3a9eff3218ddbb03d"} err="failed to get container status \"4ac0da69a5417465967ea7f81d4b6dd8688fc1eb132a26c3a9eff3218ddbb03d\": rpc error: code = NotFound desc = could not find container \"4ac0da69a5417465967ea7f81d4b6dd8688fc1eb132a26c3a9eff3218ddbb03d\": container with ID starting with 4ac0da69a5417465967ea7f81d4b6dd8688fc1eb132a26c3a9eff3218ddbb03d not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.489004 4690 scope.go:117] "RemoveContainer" containerID="ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.505844 4690 scope.go:117] "RemoveContainer" containerID="50bdcf3c73cbad74f028bc28f621ecce617ac58904d367b01795ab8beb1861a3" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.545116 4690 scope.go:117] "RemoveContainer" containerID="4d7a042256371098662d65e6df7b034e5322a4d1e6e2ea8586b4a5913b124f5b" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.560237 4690 scope.go:117] "RemoveContainer" containerID="ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.560827 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1\": container with ID starting with ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1 not found: ID does not exist" containerID="ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.560897 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1"} err="failed to get container status \"ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1\": rpc error: code = NotFound desc = could not find container \"ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1\": container with ID starting with ffec73ba076970930d0505d6c3b38af2d61798547c42c8f96e64c100df2b76f1 not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.560941 4690 scope.go:117] "RemoveContainer" containerID="50bdcf3c73cbad74f028bc28f621ecce617ac58904d367b01795ab8beb1861a3" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.561488 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"50bdcf3c73cbad74f028bc28f621ecce617ac58904d367b01795ab8beb1861a3\": container with ID starting with 50bdcf3c73cbad74f028bc28f621ecce617ac58904d367b01795ab8beb1861a3 not found: ID does not exist" containerID="50bdcf3c73cbad74f028bc28f621ecce617ac58904d367b01795ab8beb1861a3" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.561529 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"50bdcf3c73cbad74f028bc28f621ecce617ac58904d367b01795ab8beb1861a3"} err="failed to get container status \"50bdcf3c73cbad74f028bc28f621ecce617ac58904d367b01795ab8beb1861a3\": rpc error: 
code = NotFound desc = could not find container \"50bdcf3c73cbad74f028bc28f621ecce617ac58904d367b01795ab8beb1861a3\": container with ID starting with 50bdcf3c73cbad74f028bc28f621ecce617ac58904d367b01795ab8beb1861a3 not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.561559 4690 scope.go:117] "RemoveContainer" containerID="4d7a042256371098662d65e6df7b034e5322a4d1e6e2ea8586b4a5913b124f5b" Dec 11 14:18:38 crc kubenswrapper[4690]: E1211 14:18:38.562035 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d7a042256371098662d65e6df7b034e5322a4d1e6e2ea8586b4a5913b124f5b\": container with ID starting with 4d7a042256371098662d65e6df7b034e5322a4d1e6e2ea8586b4a5913b124f5b not found: ID does not exist" containerID="4d7a042256371098662d65e6df7b034e5322a4d1e6e2ea8586b4a5913b124f5b" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.562090 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d7a042256371098662d65e6df7b034e5322a4d1e6e2ea8586b4a5913b124f5b"} err="failed to get container status \"4d7a042256371098662d65e6df7b034e5322a4d1e6e2ea8586b4a5913b124f5b\": rpc error: code = NotFound desc = could not find container \"4d7a042256371098662d65e6df7b034e5322a4d1e6e2ea8586b4a5913b124f5b\": container with ID starting with 4d7a042256371098662d65e6df7b034e5322a4d1e6e2ea8586b4a5913b124f5b not found: ID does not exist" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.562129 4690 scope.go:117] "RemoveContainer" containerID="93f0ae52a70f9754902b1f3328ece5d657de29abec61d07297a34f8806e1f065" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.632892 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.633412 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.633932 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.634252 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.634535 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.634901 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.635563 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.635891 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.636289 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.636762 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.637209 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.637574 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.640454 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.914539 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.914582 4690 generic.go:334] "Generic 
(PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="6905bfef4007acd368e6ba2e53e2eb4236b1f05b4e642b5ed5991abe0fe3ac05" exitCode=1 Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.914628 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"6905bfef4007acd368e6ba2e53e2eb4236b1f05b4e642b5ed5991abe0fe3ac05"} Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.915192 4690 scope.go:117] "RemoveContainer" containerID="6905bfef4007acd368e6ba2e53e2eb4236b1f05b4e642b5ed5991abe0fe3ac05" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.915409 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.915691 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.916099 4690 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.917123 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.918259 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.918990 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.919221 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.919462 4690 status_manager.go:851] "Failed to get 
status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.919558 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z9662" event={"ID":"44a7b31b-09dd-452b-87ba-29764eaa0206","Type":"ContainerStarted","Data":"1c80cd2a90322545319a0730a69c8f3377cd248081b58abd3a0b769926d09d16"} Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.919748 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.920208 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.920517 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:38 crc kubenswrapper[4690]: I1211 14:18:38.921085 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.166483 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.167679 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.168239 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.168769 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.169124 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.169397 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.169692 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.170020 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.170244 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.170485 4690 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.170744 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.171040 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.171263 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.270218 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/64df140d-3126-42f9-b4d2-a3488a27fb57-var-lock\") pod \"64df140d-3126-42f9-b4d2-a3488a27fb57\" (UID: \"64df140d-3126-42f9-b4d2-a3488a27fb57\") " Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.270291 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/64df140d-3126-42f9-b4d2-a3488a27fb57-kubelet-dir\") pod \"64df140d-3126-42f9-b4d2-a3488a27fb57\" (UID: \"64df140d-3126-42f9-b4d2-a3488a27fb57\") " Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.270358 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64df140d-3126-42f9-b4d2-a3488a27fb57-kube-api-access\") pod \"64df140d-3126-42f9-b4d2-a3488a27fb57\" (UID: \"64df140d-3126-42f9-b4d2-a3488a27fb57\") " Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.270455 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64df140d-3126-42f9-b4d2-a3488a27fb57-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "64df140d-3126-42f9-b4d2-a3488a27fb57" (UID: "64df140d-3126-42f9-b4d2-a3488a27fb57"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.270473 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64df140d-3126-42f9-b4d2-a3488a27fb57-var-lock" (OuterVolumeSpecName: "var-lock") pod "64df140d-3126-42f9-b4d2-a3488a27fb57" (UID: "64df140d-3126-42f9-b4d2-a3488a27fb57"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.270823 4690 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/64df140d-3126-42f9-b4d2-a3488a27fb57-var-lock\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.270849 4690 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/64df140d-3126-42f9-b4d2-a3488a27fb57-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.276966 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64df140d-3126-42f9-b4d2-a3488a27fb57-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "64df140d-3126-42f9-b4d2-a3488a27fb57" (UID: "64df140d-3126-42f9-b4d2-a3488a27fb57"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.371614 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/64df140d-3126-42f9-b4d2-a3488a27fb57-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.934572 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"64df140d-3126-42f9-b4d2-a3488a27fb57","Type":"ContainerDied","Data":"c6cf04c44fe0bebecb993ae42dff10f6a7abeed6146f73d64cdb5f3e25b29f53"} Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.934621 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c6cf04c44fe0bebecb993ae42dff10f6a7abeed6146f73d64cdb5f3e25b29f53" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.934644 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.947775 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.947944 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.948315 4690 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.948906 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.949208 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.949555 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.950138 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.950433 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.950689 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.951005 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.951249 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:39 crc kubenswrapper[4690]: I1211 14:18:39.951532 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:40 crc kubenswrapper[4690]: I1211 14:18:40.246034 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 14:18:40 crc kubenswrapper[4690]: I1211 14:18:40.630483 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:40 crc kubenswrapper[4690]: I1211 14:18:40.631296 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:40 crc kubenswrapper[4690]: E1211 14:18:40.966812 4690 log.go:32] "RunPodSandbox from runtime service failed" err=< Dec 11 14:18:40 crc kubenswrapper[4690]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-7bxfp_openshift-marketplace_203500e9-a305-42cd-9909-d64ca944d363_0(8f05dd712fbd9acdb50940799be172d5091170b7bcc21f1c57305e76a4b4631f): error adding pod openshift-marketplace_marketplace-operator-79b997595-7bxfp to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"8f05dd712fbd9acdb50940799be172d5091170b7bcc21f1c57305e76a4b4631f" Netns:"/var/run/netns/b8a9320d-11d7-489f-81e7-8696a3c4b783" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-7bxfp;K8S_POD_INFRA_CONTAINER_ID=8f05dd712fbd9acdb50940799be172d5091170b7bcc21f1c57305e76a4b4631f;K8S_POD_UID=203500e9-a305-42cd-9909-d64ca944d363" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-7bxfp] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-7bxfp/203500e9-a305-42cd-9909-d64ca944d363]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-7bxfp?timeout=1m0s": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:18:40 crc kubenswrapper[4690]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 11 14:18:40 crc kubenswrapper[4690]: > Dec 11 14:18:40 crc kubenswrapper[4690]: E1211 14:18:40.967196 4690 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Dec 11 14:18:40 crc kubenswrapper[4690]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-7bxfp_openshift-marketplace_203500e9-a305-42cd-9909-d64ca944d363_0(8f05dd712fbd9acdb50940799be172d5091170b7bcc21f1c57305e76a4b4631f): error adding pod openshift-marketplace_marketplace-operator-79b997595-7bxfp to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"8f05dd712fbd9acdb50940799be172d5091170b7bcc21f1c57305e76a4b4631f" Netns:"/var/run/netns/b8a9320d-11d7-489f-81e7-8696a3c4b783" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-7bxfp;K8S_POD_INFRA_CONTAINER_ID=8f05dd712fbd9acdb50940799be172d5091170b7bcc21f1c57305e76a4b4631f;K8S_POD_UID=203500e9-a305-42cd-9909-d64ca944d363" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-7bxfp] networking: Multus: 
[openshift-marketplace/marketplace-operator-79b997595-7bxfp/203500e9-a305-42cd-9909-d64ca944d363]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-7bxfp?timeout=1m0s": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:18:40 crc kubenswrapper[4690]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 11 14:18:40 crc kubenswrapper[4690]: > pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:40 crc kubenswrapper[4690]: E1211 14:18:40.967217 4690 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err=< Dec 11 14:18:40 crc kubenswrapper[4690]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-7bxfp_openshift-marketplace_203500e9-a305-42cd-9909-d64ca944d363_0(8f05dd712fbd9acdb50940799be172d5091170b7bcc21f1c57305e76a4b4631f): error adding pod openshift-marketplace_marketplace-operator-79b997595-7bxfp to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"8f05dd712fbd9acdb50940799be172d5091170b7bcc21f1c57305e76a4b4631f" Netns:"/var/run/netns/b8a9320d-11d7-489f-81e7-8696a3c4b783" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-7bxfp;K8S_POD_INFRA_CONTAINER_ID=8f05dd712fbd9acdb50940799be172d5091170b7bcc21f1c57305e76a4b4631f;K8S_POD_UID=203500e9-a305-42cd-9909-d64ca944d363" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-7bxfp] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-7bxfp/203500e9-a305-42cd-9909-d64ca944d363]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-7bxfp?timeout=1m0s": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:18:40 crc kubenswrapper[4690]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Dec 11 14:18:40 crc kubenswrapper[4690]: > pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:40 crc kubenswrapper[4690]: E1211 14:18:40.967283 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for 
\"marketplace-operator-79b997595-7bxfp_openshift-marketplace(203500e9-a305-42cd-9909-d64ca944d363)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"marketplace-operator-79b997595-7bxfp_openshift-marketplace(203500e9-a305-42cd-9909-d64ca944d363)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-7bxfp_openshift-marketplace_203500e9-a305-42cd-9909-d64ca944d363_0(8f05dd712fbd9acdb50940799be172d5091170b7bcc21f1c57305e76a4b4631f): error adding pod openshift-marketplace_marketplace-operator-79b997595-7bxfp to CNI network \\\"multus-cni-network\\\": plugin type=\\\"multus-shim\\\" name=\\\"multus-cni-network\\\" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:\\\"8f05dd712fbd9acdb50940799be172d5091170b7bcc21f1c57305e76a4b4631f\\\" Netns:\\\"/var/run/netns/b8a9320d-11d7-489f-81e7-8696a3c4b783\\\" IfName:\\\"eth0\\\" Args:\\\"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-7bxfp;K8S_POD_INFRA_CONTAINER_ID=8f05dd712fbd9acdb50940799be172d5091170b7bcc21f1c57305e76a4b4631f;K8S_POD_UID=203500e9-a305-42cd-9909-d64ca944d363\\\" Path:\\\"\\\" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-7bxfp] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-7bxfp/203500e9-a305-42cd-9909-d64ca944d363]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: status update failed for pod /: Get \\\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-7bxfp?timeout=1m0s\\\": dial tcp 38.102.83.51:6443: connect: connection refused\\n': StdinData: {\\\"binDir\\\":\\\"/var/lib/cni/bin\\\",\\\"clusterNetwork\\\":\\\"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf\\\",\\\"cniVersion\\\":\\\"0.3.1\\\",\\\"daemonSocketDir\\\":\\\"/run/multus/socket\\\",\\\"globalNamespaces\\\":\\\"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv\\\",\\\"logLevel\\\":\\\"verbose\\\",\\\"logToStderr\\\":true,\\\"name\\\":\\\"multus-cni-network\\\",\\\"namespaceIsolation\\\":true,\\\"type\\\":\\\"multus-shim\\\"}\"" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" podUID="203500e9-a305-42cd-9909-d64ca944d363" Dec 11 14:18:41 crc kubenswrapper[4690]: I1211 14:18:41.950857 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c61312d-523c-44a8-a451-dc96bba0f6d7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0c61312d-523c-44a8-a451-dc96bba0f6d7" (UID: "0c61312d-523c-44a8-a451-dc96bba0f6d7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.005162 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c61312d-523c-44a8-a451-dc96bba0f6d7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.107688 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.108269 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.108966 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.109288 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.109551 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.109819 4690 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.110081 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.110320 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc 
kubenswrapper[4690]: I1211 14:18:42.110538 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.110767 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.111055 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.111227 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.953023 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.953158 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"002765fc93cc12db18dea5d7225e7940b74dd170c3337673810bf54f450f73ea"} Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.954257 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.954530 4690 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.954880 4690 status_manager.go:851] "Failed to get status for pod" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" pod="openshift-machine-config-operator/machine-config-daemon-z9662" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-z9662\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.955376 4690 status_manager.go:851] "Failed to get status for pod" 
podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.955680 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.955944 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.956231 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.956493 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.956771 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.957065 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.957383 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.957656 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.957930 4690 
status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.958335 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.958592 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.958838 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.959101 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.959350 4690 status_manager.go:851] "Failed to get status for pod" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" pod="openshift-machine-config-operator/machine-config-daemon-z9662" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-z9662\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.959601 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.959888 4690 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.960177 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: 
connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.960418 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.960691 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.960972 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.961188 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:42 crc kubenswrapper[4690]: I1211 14:18:42.961460 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:43 crc kubenswrapper[4690]: I1211 14:18:43.522540 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" podUID="787bb691-ec3b-4dad-868a-3dcd2c33f4e1" containerName="oauth-openshift" containerID="cri-o://c69bcd89ea4fc0cd893aaae8c8c5ac078200644c1c10caca5f968f81468bc12d" gracePeriod=15 Dec 11 14:18:43 crc kubenswrapper[4690]: E1211 14:18:43.671466 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.51:6443: connect: connection refused" interval="7s" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.569897 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.570940 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.571632 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.571902 4690 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.572101 4690 status_manager.go:851] "Failed to get status for pod" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" pod="openshift-machine-config-operator/machine-config-daemon-z9662" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-z9662\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.572263 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.572414 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.572569 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.572724 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.572899 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.573171 4690 status_manager.go:851] "Failed to get status for pod" podUID="787bb691-ec3b-4dad-868a-3dcd2c33f4e1" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-9wx9g\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.573333 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.573483 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.573626 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.573773 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.665648 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-cliconfig\") pod \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.666812 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "787bb691-ec3b-4dad-868a-3dcd2c33f4e1" (UID: "787bb691-ec3b-4dad-868a-3dcd2c33f4e1"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.666455 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-trusted-ca-bundle\") pod \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.667100 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "787bb691-ec3b-4dad-868a-3dcd2c33f4e1" (UID: "787bb691-ec3b-4dad-868a-3dcd2c33f4e1"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.667204 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-audit-dir\") pod \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.667280 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-login\") pod \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.667324 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgm59\" (UniqueName: \"kubernetes.io/projected/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-kube-api-access-pgm59\") pod \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.667349 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-provider-selection\") pod \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.667402 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-serving-cert\") pod \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.667443 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-idp-0-file-data\") pod \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.667514 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-audit-policies\") pod \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\" (UID: 
\"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.667550 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-error\") pod \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.667575 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-service-ca\") pod \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.667606 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-ocp-branding-template\") pod \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.667643 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-router-certs\") pod \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.667671 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-session\") pod \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\" (UID: \"787bb691-ec3b-4dad-868a-3dcd2c33f4e1\") " Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.668401 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.668431 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.668759 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "787bb691-ec3b-4dad-868a-3dcd2c33f4e1" (UID: "787bb691-ec3b-4dad-868a-3dcd2c33f4e1"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.670740 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "787bb691-ec3b-4dad-868a-3dcd2c33f4e1" (UID: "787bb691-ec3b-4dad-868a-3dcd2c33f4e1"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.671020 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "787bb691-ec3b-4dad-868a-3dcd2c33f4e1" (UID: "787bb691-ec3b-4dad-868a-3dcd2c33f4e1"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.676460 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "787bb691-ec3b-4dad-868a-3dcd2c33f4e1" (UID: "787bb691-ec3b-4dad-868a-3dcd2c33f4e1"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.677396 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "787bb691-ec3b-4dad-868a-3dcd2c33f4e1" (UID: "787bb691-ec3b-4dad-868a-3dcd2c33f4e1"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.680768 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-kube-api-access-pgm59" (OuterVolumeSpecName: "kube-api-access-pgm59") pod "787bb691-ec3b-4dad-868a-3dcd2c33f4e1" (UID: "787bb691-ec3b-4dad-868a-3dcd2c33f4e1"). InnerVolumeSpecName "kube-api-access-pgm59". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.681060 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "787bb691-ec3b-4dad-868a-3dcd2c33f4e1" (UID: "787bb691-ec3b-4dad-868a-3dcd2c33f4e1"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.681496 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "787bb691-ec3b-4dad-868a-3dcd2c33f4e1" (UID: "787bb691-ec3b-4dad-868a-3dcd2c33f4e1"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.684334 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "787bb691-ec3b-4dad-868a-3dcd2c33f4e1" (UID: "787bb691-ec3b-4dad-868a-3dcd2c33f4e1"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.686064 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "787bb691-ec3b-4dad-868a-3dcd2c33f4e1" (UID: "787bb691-ec3b-4dad-868a-3dcd2c33f4e1"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.690514 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "787bb691-ec3b-4dad-868a-3dcd2c33f4e1" (UID: "787bb691-ec3b-4dad-868a-3dcd2c33f4e1"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.691015 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "787bb691-ec3b-4dad-868a-3dcd2c33f4e1" (UID: "787bb691-ec3b-4dad-868a-3dcd2c33f4e1"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.769616 4690 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.769668 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.769686 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.769703 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.769721 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.769736 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.769748 4690 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:46 crc 
kubenswrapper[4690]: I1211 14:18:46.769761 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.769776 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgm59\" (UniqueName: \"kubernetes.io/projected/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-kube-api-access-pgm59\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.769791 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.769808 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.769822 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/787bb691-ec3b-4dad-868a-3dcd2c33f4e1-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.976935 4690 generic.go:334] "Generic (PLEG): container finished" podID="787bb691-ec3b-4dad-868a-3dcd2c33f4e1" containerID="c69bcd89ea4fc0cd893aaae8c8c5ac078200644c1c10caca5f968f81468bc12d" exitCode=0 Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.977020 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" event={"ID":"787bb691-ec3b-4dad-868a-3dcd2c33f4e1","Type":"ContainerDied","Data":"c69bcd89ea4fc0cd893aaae8c8c5ac078200644c1c10caca5f968f81468bc12d"} Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.977426 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" event={"ID":"787bb691-ec3b-4dad-868a-3dcd2c33f4e1","Type":"ContainerDied","Data":"ae4da0eeb6e663d73dd702ff84b1dd0bd68d1be389a53ad4aea88131bfdf70d5"} Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.977453 4690 scope.go:117] "RemoveContainer" containerID="c69bcd89ea4fc0cd893aaae8c8c5ac078200644c1c10caca5f968f81468bc12d" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.977070 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.978282 4690 status_manager.go:851] "Failed to get status for pod" podUID="787bb691-ec3b-4dad-868a-3dcd2c33f4e1" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-9wx9g\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.978579 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.978808 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.979148 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.979393 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.979669 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.980121 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.980350 4690 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.980826 4690 status_manager.go:851] "Failed to get status for pod" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" pod="openshift-machine-config-operator/machine-config-daemon-z9662" 
err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-z9662\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.981728 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.981973 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.982178 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.982487 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.982945 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.995834 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.996504 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.997021 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.997295 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" 
pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.997532 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.997819 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.998359 4690 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.998623 4690 status_manager.go:851] "Failed to get status for pod" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" pod="openshift-machine-config-operator/machine-config-daemon-z9662" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-z9662\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.998894 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.999286 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.999515 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:46 crc kubenswrapper[4690]: I1211 14:18:46.999763 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.000056 4690 status_manager.go:851] "Failed to get status for pod" 
podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.000313 4690 status_manager.go:851] "Failed to get status for pod" podUID="787bb691-ec3b-4dad-868a-3dcd2c33f4e1" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-9wx9g\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.004403 4690 scope.go:117] "RemoveContainer" containerID="c69bcd89ea4fc0cd893aaae8c8c5ac078200644c1c10caca5f968f81468bc12d" Dec 11 14:18:47 crc kubenswrapper[4690]: E1211 14:18:47.005046 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c69bcd89ea4fc0cd893aaae8c8c5ac078200644c1c10caca5f968f81468bc12d\": container with ID starting with c69bcd89ea4fc0cd893aaae8c8c5ac078200644c1c10caca5f968f81468bc12d not found: ID does not exist" containerID="c69bcd89ea4fc0cd893aaae8c8c5ac078200644c1c10caca5f968f81468bc12d" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.005191 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c69bcd89ea4fc0cd893aaae8c8c5ac078200644c1c10caca5f968f81468bc12d"} err="failed to get container status \"c69bcd89ea4fc0cd893aaae8c8c5ac078200644c1c10caca5f968f81468bc12d\": rpc error: code = NotFound desc = could not find container \"c69bcd89ea4fc0cd893aaae8c8c5ac078200644c1c10caca5f968f81468bc12d\": container with ID starting with c69bcd89ea4fc0cd893aaae8c8c5ac078200644c1c10caca5f968f81468bc12d not found: ID does not exist" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.631082 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.632411 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.633116 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.633533 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.633837 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.634278 4690 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.634732 4690 status_manager.go:851] "Failed to get status for pod" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" pod="openshift-machine-config-operator/machine-config-daemon-z9662" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-z9662\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.635087 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.635449 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.635795 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.636179 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.636462 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.636768 4690 status_manager.go:851] "Failed to get status for pod" podUID="787bb691-ec3b-4dad-868a-3dcd2c33f4e1" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-9wx9g\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.637109 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.637532 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.648638 4690 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ca93a1ee-ec99-4255-9ae2-a987cf127929" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.648686 4690 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ca93a1ee-ec99-4255-9ae2-a987cf127929" Dec 11 14:18:47 crc kubenswrapper[4690]: E1211 14:18:47.649170 4690 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.649857 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:18:47 crc kubenswrapper[4690]: E1211 14:18:47.724506 4690 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.51:6443: connect: connection refused" event=< Dec 11 14:18:47 crc kubenswrapper[4690]: &Event{ObjectMeta:{marketplace-operator-79b997595-7bxfp.18802ef9bbfd3991 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:marketplace-operator-79b997595-7bxfp,UID:203500e9-a305-42cd-9909-d64ca944d363,APIVersion:v1,ResourceVersion:29396,FieldPath:,},Reason:FailedCreatePodSandBox,Message:Failed to create pod sandbox: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_marketplace-operator-79b997595-7bxfp_openshift-marketplace_203500e9-a305-42cd-9909-d64ca944d363_0(929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13): error adding pod openshift-marketplace_marketplace-operator-79b997595-7bxfp to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13" Netns:"/var/run/netns/e27a2426-a164-4a42-9b0f-8d41a5f46787" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openshift-marketplace;K8S_POD_NAME=marketplace-operator-79b997595-7bxfp;K8S_POD_INFRA_CONTAINER_ID=929ca455def51e6f906b7f719ecadaa57d5e4f2dc1c75be4829b089710951e13;K8S_POD_UID=203500e9-a305-42cd-9909-d64ca944d363" Path:"" ERRORED: error configuring pod [openshift-marketplace/marketplace-operator-79b997595-7bxfp] networking: Multus: [openshift-marketplace/marketplace-operator-79b997595-7bxfp/203500e9-a305-42cd-9909-d64ca944d363]: error setting the networks status: SetPodNetworkStatusAnnotation: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: SetNetworkStatus: failed to update the pod marketplace-operator-79b997595-7bxfp in out of cluster comm: status update failed for pod /: Get "https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-7bxfp?timeout=1m0s": dial tcp 38.102.83.51:6443: connect: connection refused Dec 11 14:18:47 crc kubenswrapper[4690]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"},Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-11 14:18:24.064903569 +0000 UTC m=+235.680305212,LastTimestamp:2025-12-11 14:18:24.064903569 +0000 UTC m=+235.680305212,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Dec 11 14:18:47 crc kubenswrapper[4690]: > Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.800167 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.806590 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.807409 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.808068 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.808402 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.808795 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.809334 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.809631 4690 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.810061 4690 status_manager.go:851] "Failed to get status for pod" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" pod="openshift-machine-config-operator/machine-config-daemon-z9662" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-z9662\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.810365 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.810693 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.811034 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.811363 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.811641 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.811999 4690 status_manager.go:851] "Failed to get status for pod" podUID="787bb691-ec3b-4dad-868a-3dcd2c33f4e1" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-9wx9g\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.812335 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.987567 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b0994745cfa3c7ccab31461c862b28af22b19e6ed7ea2e0825e0e3a7e46980c5"} Dec 11 14:18:47 crc kubenswrapper[4690]: I1211 14:18:47.988221 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.636395 4690 status_manager.go:851] "Failed to get status for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.636788 4690 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.637233 4690 
status_manager.go:851] "Failed to get status for pod" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" pod="openshift-machine-config-operator/machine-config-daemon-z9662" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-z9662\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.638116 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.638650 4690 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.638999 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.639370 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.639711 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.640086 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.642326 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.642715 4690 status_manager.go:851] "Failed to get status for pod" podUID="787bb691-ec3b-4dad-868a-3dcd2c33f4e1" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-9wx9g\": dial tcp 38.102.83.51:6443: connect: 
connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.643004 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.643275 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.643564 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.643807 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.994707 4690 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="c96ac1cc82aad8338f1dea6b1509e3c87a8af5f95b3fa48545ad81b9368ba905" exitCode=0 Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.994832 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"c96ac1cc82aad8338f1dea6b1509e3c87a8af5f95b3fa48545ad81b9368ba905"} Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.995267 4690 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ca93a1ee-ec99-4255-9ae2-a987cf127929" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.995290 4690 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ca93a1ee-ec99-4255-9ae2-a987cf127929" Dec 11 14:18:48 crc kubenswrapper[4690]: E1211 14:18:48.995798 4690 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.996075 4690 status_manager.go:851] "Failed to get status for pod" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" pod="openshift-marketplace/community-operators-8bfnc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-8bfnc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.996696 4690 status_manager.go:851] "Failed to get status for pod" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" 
pod="openshift-marketplace/redhat-operators-24xfd" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-24xfd\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.998541 4690 status_manager.go:851] "Failed to get status for pod" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.999128 4690 status_manager.go:851] "Failed to get status for pod" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" pod="openshift-marketplace/marketplace-operator-79b997595-r2jbn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/marketplace-operator-79b997595-r2jbn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:48 crc kubenswrapper[4690]: I1211 14:18:48.999610 4690 status_manager.go:851] "Failed to get status for pod" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" pod="openshift-marketplace/certified-operators-fs97w" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-fs97w\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:49 crc kubenswrapper[4690]: I1211 14:18:49.000107 4690 status_manager.go:851] "Failed to get status for pod" podUID="787bb691-ec3b-4dad-868a-3dcd2c33f4e1" pod="openshift-authentication/oauth-openshift-558db77b4-9wx9g" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-9wx9g\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:49 crc kubenswrapper[4690]: I1211 14:18:49.000364 4690 status_manager.go:851] "Failed to get status for pod" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" pod="openshift-marketplace/community-operators-kmjlt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kmjlt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:49 crc kubenswrapper[4690]: I1211 14:18:49.000687 4690 status_manager.go:851] "Failed to get status for pod" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" pod="openshift-marketplace/redhat-operators-xq68s" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xq68s\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:49 crc kubenswrapper[4690]: I1211 14:18:49.001032 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:49 crc kubenswrapper[4690]: I1211 14:18:49.001285 4690 status_manager.go:851] "Failed to get status for pod" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" pod="openshift-marketplace/certified-operators-djmxn" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-djmxn\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:49 crc kubenswrapper[4690]: I1211 14:18:49.001574 4690 status_manager.go:851] "Failed to get status 
for pod" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" pod="openshift-marketplace/redhat-marketplace-k2lrt" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-k2lrt\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:49 crc kubenswrapper[4690]: I1211 14:18:49.002434 4690 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:49 crc kubenswrapper[4690]: I1211 14:18:49.002773 4690 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:49 crc kubenswrapper[4690]: I1211 14:18:49.003130 4690 status_manager.go:851] "Failed to get status for pod" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" pod="openshift-machine-config-operator/machine-config-daemon-z9662" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-z9662\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:49 crc kubenswrapper[4690]: I1211 14:18:49.003463 4690 status_manager.go:851] "Failed to get status for pod" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" pod="openshift-marketplace/redhat-marketplace-r7754" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-r7754\": dial tcp 38.102.83.51:6443: connect: connection refused" Dec 11 14:18:50 crc kubenswrapper[4690]: I1211 14:18:50.008116 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0d2dc4863b0238896bb8526b26b6cc29fda0bb78e764f1f66d2d7ddbee9e61f1"} Dec 11 14:18:50 crc kubenswrapper[4690]: I1211 14:18:50.009796 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"cba2ee9ffc79adb02260b304b2d2c6c6fdb8c582f036e93930945017e69d0ab4"} Dec 11 14:18:51 crc kubenswrapper[4690]: I1211 14:18:51.017011 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"4c0f7bd5bfd77feeca45d48ca7ac74a1f48e94ca4bc734c53406b6fa9372a41c"} Dec 11 14:18:51 crc kubenswrapper[4690]: I1211 14:18:51.017066 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"adbeed729350ca0c0693f4801196e5c9d1689d6dcc2ed1d4cdb43833abc383fd"} Dec 11 14:18:52 crc kubenswrapper[4690]: I1211 14:18:52.026492 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d9070b691e8fd112da4910ffec70ebf195b853d101b2c83ebd91b365f6344cb9"} Dec 11 14:18:52 crc kubenswrapper[4690]: I1211 14:18:52.026757 
4690 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ca93a1ee-ec99-4255-9ae2-a987cf127929" Dec 11 14:18:52 crc kubenswrapper[4690]: I1211 14:18:52.026841 4690 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ca93a1ee-ec99-4255-9ae2-a987cf127929" Dec 11 14:18:52 crc kubenswrapper[4690]: I1211 14:18:52.026928 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:18:52 crc kubenswrapper[4690]: I1211 14:18:52.035052 4690 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:18:52 crc kubenswrapper[4690]: I1211 14:18:52.129973 4690 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="95086166-fd15-4182-9b1a-f3c601c30190" Dec 11 14:18:52 crc kubenswrapper[4690]: I1211 14:18:52.630528 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:52 crc kubenswrapper[4690]: I1211 14:18:52.630979 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:53 crc kubenswrapper[4690]: I1211 14:18:53.033997 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" event={"ID":"203500e9-a305-42cd-9909-d64ca944d363","Type":"ContainerStarted","Data":"e7687c37c2e48ff6ab9ac9962e42d5f48ee850ec0b3ec851dbf0782588ab5986"} Dec 11 14:18:53 crc kubenswrapper[4690]: I1211 14:18:53.034330 4690 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ca93a1ee-ec99-4255-9ae2-a987cf127929" Dec 11 14:18:53 crc kubenswrapper[4690]: I1211 14:18:53.034422 4690 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ca93a1ee-ec99-4255-9ae2-a987cf127929" Dec 11 14:18:53 crc kubenswrapper[4690]: I1211 14:18:53.038521 4690 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="95086166-fd15-4182-9b1a-f3c601c30190" Dec 11 14:18:54 crc kubenswrapper[4690]: I1211 14:18:54.040146 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-7bxfp_203500e9-a305-42cd-9909-d64ca944d363/marketplace-operator/0.log" Dec 11 14:18:54 crc kubenswrapper[4690]: I1211 14:18:54.040450 4690 generic.go:334] "Generic (PLEG): container finished" podID="203500e9-a305-42cd-9909-d64ca944d363" containerID="0e71884c4128f1458dfd9b01554439ff25499edfae79ee36f2a2aa1fb5517198" exitCode=1 Dec 11 14:18:54 crc kubenswrapper[4690]: I1211 14:18:54.040482 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" event={"ID":"203500e9-a305-42cd-9909-d64ca944d363","Type":"ContainerDied","Data":"0e71884c4128f1458dfd9b01554439ff25499edfae79ee36f2a2aa1fb5517198"} Dec 11 14:18:54 crc kubenswrapper[4690]: I1211 14:18:54.041059 4690 scope.go:117] "RemoveContainer" containerID="0e71884c4128f1458dfd9b01554439ff25499edfae79ee36f2a2aa1fb5517198" Dec 11 14:18:55 crc kubenswrapper[4690]: I1211 14:18:55.047111 4690 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-7bxfp_203500e9-a305-42cd-9909-d64ca944d363/marketplace-operator/0.log" Dec 11 14:18:55 crc kubenswrapper[4690]: I1211 14:18:55.047539 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" event={"ID":"203500e9-a305-42cd-9909-d64ca944d363","Type":"ContainerStarted","Data":"210da0c7628af877935a8de24dadfdf2718a5460b8b8235b5bcfe83cf9883b18"} Dec 11 14:18:55 crc kubenswrapper[4690]: I1211 14:18:55.048000 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:18:55 crc kubenswrapper[4690]: I1211 14:18:55.049565 4690 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-7bxfp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.56:8080/healthz\": dial tcp 10.217.0.56:8080: connect: connection refused" start-of-body= Dec 11 14:18:55 crc kubenswrapper[4690]: I1211 14:18:55.049625 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" podUID="203500e9-a305-42cd-9909-d64ca944d363" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.56:8080/healthz\": dial tcp 10.217.0.56:8080: connect: connection refused" Dec 11 14:18:56 crc kubenswrapper[4690]: I1211 14:18:56.054866 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-7bxfp_203500e9-a305-42cd-9909-d64ca944d363/marketplace-operator/1.log" Dec 11 14:18:56 crc kubenswrapper[4690]: I1211 14:18:56.056337 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-7bxfp_203500e9-a305-42cd-9909-d64ca944d363/marketplace-operator/0.log" Dec 11 14:18:56 crc kubenswrapper[4690]: I1211 14:18:56.056479 4690 generic.go:334] "Generic (PLEG): container finished" podID="203500e9-a305-42cd-9909-d64ca944d363" containerID="210da0c7628af877935a8de24dadfdf2718a5460b8b8235b5bcfe83cf9883b18" exitCode=1 Dec 11 14:18:56 crc kubenswrapper[4690]: I1211 14:18:56.056607 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" event={"ID":"203500e9-a305-42cd-9909-d64ca944d363","Type":"ContainerDied","Data":"210da0c7628af877935a8de24dadfdf2718a5460b8b8235b5bcfe83cf9883b18"} Dec 11 14:18:56 crc kubenswrapper[4690]: I1211 14:18:56.056726 4690 scope.go:117] "RemoveContainer" containerID="0e71884c4128f1458dfd9b01554439ff25499edfae79ee36f2a2aa1fb5517198" Dec 11 14:18:56 crc kubenswrapper[4690]: I1211 14:18:56.057438 4690 scope.go:117] "RemoveContainer" containerID="210da0c7628af877935a8de24dadfdf2718a5460b8b8235b5bcfe83cf9883b18" Dec 11 14:18:56 crc kubenswrapper[4690]: E1211 14:18:56.057794 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-7bxfp_openshift-marketplace(203500e9-a305-42cd-9909-d64ca944d363)\"" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" podUID="203500e9-a305-42cd-9909-d64ca944d363" Dec 11 14:18:57 crc kubenswrapper[4690]: I1211 14:18:57.064389 4690 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-7bxfp_203500e9-a305-42cd-9909-d64ca944d363/marketplace-operator/1.log" Dec 11 14:18:57 crc kubenswrapper[4690]: I1211 14:18:57.065655 4690 scope.go:117] "RemoveContainer" containerID="210da0c7628af877935a8de24dadfdf2718a5460b8b8235b5bcfe83cf9883b18" Dec 11 14:18:57 crc kubenswrapper[4690]: E1211 14:18:57.065846 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-7bxfp_openshift-marketplace(203500e9-a305-42cd-9909-d64ca944d363)\"" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" podUID="203500e9-a305-42cd-9909-d64ca944d363" Dec 11 14:19:00 crc kubenswrapper[4690]: I1211 14:19:00.251704 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 11 14:19:03 crc kubenswrapper[4690]: I1211 14:19:03.545635 4690 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:19:03 crc kubenswrapper[4690]: I1211 14:19:03.546712 4690 scope.go:117] "RemoveContainer" containerID="210da0c7628af877935a8de24dadfdf2718a5460b8b8235b5bcfe83cf9883b18" Dec 11 14:19:03 crc kubenswrapper[4690]: E1211 14:19:03.546930 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 10s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-7bxfp_openshift-marketplace(203500e9-a305-42cd-9909-d64ca944d363)\"" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" podUID="203500e9-a305-42cd-9909-d64ca944d363" Dec 11 14:19:14 crc kubenswrapper[4690]: I1211 14:19:14.632095 4690 scope.go:117] "RemoveContainer" containerID="210da0c7628af877935a8de24dadfdf2718a5460b8b8235b5bcfe83cf9883b18" Dec 11 14:19:16 crc kubenswrapper[4690]: I1211 14:19:16.174324 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-7bxfp_203500e9-a305-42cd-9909-d64ca944d363/marketplace-operator/1.log" Dec 11 14:19:16 crc kubenswrapper[4690]: I1211 14:19:16.174663 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" event={"ID":"203500e9-a305-42cd-9909-d64ca944d363","Type":"ContainerStarted","Data":"974c350cb459faf7af6175a3f71d9bb8bb2664a9e8df96966cb6d1792306b097"} Dec 11 14:19:17 crc kubenswrapper[4690]: I1211 14:19:17.182537 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-7bxfp_203500e9-a305-42cd-9909-d64ca944d363/marketplace-operator/2.log" Dec 11 14:19:17 crc kubenswrapper[4690]: I1211 14:19:17.184436 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-7bxfp_203500e9-a305-42cd-9909-d64ca944d363/marketplace-operator/1.log" Dec 11 14:19:17 crc kubenswrapper[4690]: I1211 14:19:17.184507 4690 generic.go:334] "Generic (PLEG): container finished" podID="203500e9-a305-42cd-9909-d64ca944d363" containerID="974c350cb459faf7af6175a3f71d9bb8bb2664a9e8df96966cb6d1792306b097" exitCode=1 Dec 11 14:19:17 crc kubenswrapper[4690]: I1211 14:19:17.184563 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" event={"ID":"203500e9-a305-42cd-9909-d64ca944d363","Type":"ContainerDied","Data":"974c350cb459faf7af6175a3f71d9bb8bb2664a9e8df96966cb6d1792306b097"} Dec 11 14:19:17 crc kubenswrapper[4690]: I1211 14:19:17.184651 4690 scope.go:117] "RemoveContainer" containerID="210da0c7628af877935a8de24dadfdf2718a5460b8b8235b5bcfe83cf9883b18" Dec 11 14:19:17 crc kubenswrapper[4690]: I1211 14:19:17.185268 4690 scope.go:117] "RemoveContainer" containerID="974c350cb459faf7af6175a3f71d9bb8bb2664a9e8df96966cb6d1792306b097" Dec 11 14:19:17 crc kubenswrapper[4690]: E1211 14:19:17.185628 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-7bxfp_openshift-marketplace(203500e9-a305-42cd-9909-d64ca944d363)\"" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" podUID="203500e9-a305-42cd-9909-d64ca944d363" Dec 11 14:19:18 crc kubenswrapper[4690]: I1211 14:19:18.193499 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-7bxfp_203500e9-a305-42cd-9909-d64ca944d363/marketplace-operator/2.log" Dec 11 14:19:18 crc kubenswrapper[4690]: I1211 14:19:18.194054 4690 scope.go:117] "RemoveContainer" containerID="974c350cb459faf7af6175a3f71d9bb8bb2664a9e8df96966cb6d1792306b097" Dec 11 14:19:18 crc kubenswrapper[4690]: E1211 14:19:18.194236 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-7bxfp_openshift-marketplace(203500e9-a305-42cd-9909-d64ca944d363)\"" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" podUID="203500e9-a305-42cd-9909-d64ca944d363" Dec 11 14:19:22 crc kubenswrapper[4690]: I1211 14:19:22.772186 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 11 14:19:23 crc kubenswrapper[4690]: I1211 14:19:23.545670 4690 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:19:23 crc kubenswrapper[4690]: I1211 14:19:23.546270 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:19:23 crc kubenswrapper[4690]: I1211 14:19:23.547055 4690 scope.go:117] "RemoveContainer" containerID="974c350cb459faf7af6175a3f71d9bb8bb2664a9e8df96966cb6d1792306b097" Dec 11 14:19:23 crc kubenswrapper[4690]: E1211 14:19:23.547453 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"marketplace-operator\" with CrashLoopBackOff: \"back-off 20s restarting failed container=marketplace-operator pod=marketplace-operator-79b997595-7bxfp_openshift-marketplace(203500e9-a305-42cd-9909-d64ca944d363)\"" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" podUID="203500e9-a305-42cd-9909-d64ca944d363" Dec 11 14:19:23 crc kubenswrapper[4690]: I1211 14:19:23.804201 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 11 14:19:24 crc kubenswrapper[4690]: I1211 14:19:24.172150 4690 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-config-operator"/"config-operator-serving-cert" Dec 11 14:19:24 crc kubenswrapper[4690]: I1211 14:19:24.330436 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 11 14:19:25 crc kubenswrapper[4690]: I1211 14:19:25.172001 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 11 14:19:25 crc kubenswrapper[4690]: I1211 14:19:25.286741 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 11 14:19:25 crc kubenswrapper[4690]: I1211 14:19:25.417359 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 11 14:19:25 crc kubenswrapper[4690]: I1211 14:19:25.660539 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 11 14:19:26 crc kubenswrapper[4690]: I1211 14:19:26.278251 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 11 14:19:26 crc kubenswrapper[4690]: I1211 14:19:26.756033 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 11 14:19:26 crc kubenswrapper[4690]: I1211 14:19:26.964676 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 11 14:19:27 crc kubenswrapper[4690]: I1211 14:19:27.276238 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 11 14:19:27 crc kubenswrapper[4690]: I1211 14:19:27.391342 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 11 14:19:27 crc kubenswrapper[4690]: I1211 14:19:27.419152 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 11 14:19:27 crc kubenswrapper[4690]: I1211 14:19:27.547824 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 11 14:19:28 crc kubenswrapper[4690]: I1211 14:19:28.058129 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 11 14:19:28 crc kubenswrapper[4690]: I1211 14:19:28.153908 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 11 14:19:28 crc kubenswrapper[4690]: I1211 14:19:28.485010 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 11 14:19:28 crc kubenswrapper[4690]: I1211 14:19:28.486542 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 11 14:19:28 crc kubenswrapper[4690]: I1211 14:19:28.720768 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 11 14:19:28 crc kubenswrapper[4690]: I1211 14:19:28.949869 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 11 14:19:29 crc 
kubenswrapper[4690]: I1211 14:19:29.026893 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 11 14:19:29 crc kubenswrapper[4690]: I1211 14:19:29.030778 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 11 14:19:29 crc kubenswrapper[4690]: I1211 14:19:29.177111 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 11 14:19:29 crc kubenswrapper[4690]: I1211 14:19:29.341382 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 11 14:19:29 crc kubenswrapper[4690]: I1211 14:19:29.609876 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 11 14:19:29 crc kubenswrapper[4690]: I1211 14:19:29.624846 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 11 14:19:29 crc kubenswrapper[4690]: I1211 14:19:29.796796 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 11 14:19:30 crc kubenswrapper[4690]: I1211 14:19:30.106623 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 11 14:19:32 crc kubenswrapper[4690]: I1211 14:19:30.268972 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 11 14:19:32 crc kubenswrapper[4690]: I1211 14:19:31.151397 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 11 14:19:32 crc kubenswrapper[4690]: I1211 14:19:31.159479 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 11 14:19:32 crc kubenswrapper[4690]: I1211 14:19:31.660167 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 11 14:19:32 crc kubenswrapper[4690]: I1211 14:19:31.752927 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 11 14:19:32 crc kubenswrapper[4690]: I1211 14:19:32.305622 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 11 14:19:32 crc kubenswrapper[4690]: I1211 14:19:32.336658 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 11 14:19:32 crc kubenswrapper[4690]: I1211 14:19:32.403824 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 11 14:19:32 crc kubenswrapper[4690]: I1211 14:19:32.486117 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 11 14:19:32 crc kubenswrapper[4690]: I1211 14:19:32.489654 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 11 14:19:32 crc kubenswrapper[4690]: I1211 14:19:32.900114 4690 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 11 14:19:33 crc kubenswrapper[4690]: I1211 14:19:33.247413 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 11 14:19:33 crc kubenswrapper[4690]: I1211 14:19:33.273476 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 11 14:19:33 crc kubenswrapper[4690]: I1211 14:19:33.372484 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 11 14:19:33 crc kubenswrapper[4690]: I1211 14:19:33.516818 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 11 14:19:33 crc kubenswrapper[4690]: I1211 14:19:33.939768 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 11 14:19:33 crc kubenswrapper[4690]: I1211 14:19:33.952569 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 11 14:19:34 crc kubenswrapper[4690]: I1211 14:19:34.306937 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 11 14:19:34 crc kubenswrapper[4690]: I1211 14:19:34.318923 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 11 14:19:34 crc kubenswrapper[4690]: I1211 14:19:34.417813 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 11 14:19:34 crc kubenswrapper[4690]: I1211 14:19:34.541713 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 11 14:19:34 crc kubenswrapper[4690]: I1211 14:19:34.579241 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 11 14:19:34 crc kubenswrapper[4690]: I1211 14:19:34.740777 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 11 14:19:34 crc kubenswrapper[4690]: I1211 14:19:34.753946 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 11 14:19:34 crc kubenswrapper[4690]: I1211 14:19:34.803439 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 11 14:19:34 crc kubenswrapper[4690]: I1211 14:19:34.854439 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 11 14:19:34 crc kubenswrapper[4690]: I1211 14:19:34.894242 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 11 14:19:34 crc kubenswrapper[4690]: I1211 14:19:34.932521 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 11 14:19:35 crc kubenswrapper[4690]: I1211 14:19:35.075510 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 11 14:19:35 crc kubenswrapper[4690]: I1211 14:19:35.335066 4690 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console"/"console-config" Dec 11 14:19:35 crc kubenswrapper[4690]: I1211 14:19:35.369190 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 11 14:19:35 crc kubenswrapper[4690]: I1211 14:19:35.425704 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 11 14:19:35 crc kubenswrapper[4690]: I1211 14:19:35.433556 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 11 14:19:35 crc kubenswrapper[4690]: I1211 14:19:35.635560 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 11 14:19:35 crc kubenswrapper[4690]: I1211 14:19:35.669774 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 11 14:19:35 crc kubenswrapper[4690]: I1211 14:19:35.805605 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 11 14:19:35 crc kubenswrapper[4690]: I1211 14:19:35.970906 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 11 14:19:35 crc kubenswrapper[4690]: I1211 14:19:35.988103 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 11 14:19:36 crc kubenswrapper[4690]: I1211 14:19:36.092066 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 11 14:19:36 crc kubenswrapper[4690]: I1211 14:19:36.093872 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 11 14:19:36 crc kubenswrapper[4690]: I1211 14:19:36.222485 4690 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 11 14:19:36 crc kubenswrapper[4690]: I1211 14:19:36.251987 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 11 14:19:36 crc kubenswrapper[4690]: I1211 14:19:36.274044 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 11 14:19:36 crc kubenswrapper[4690]: I1211 14:19:36.681141 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 11 14:19:36 crc kubenswrapper[4690]: I1211 14:19:36.690319 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 11 14:19:36 crc kubenswrapper[4690]: I1211 14:19:36.818214 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 11 14:19:36 crc kubenswrapper[4690]: I1211 14:19:36.836216 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 11 14:19:37 crc kubenswrapper[4690]: I1211 14:19:37.321886 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 11 14:19:37 crc kubenswrapper[4690]: I1211 14:19:37.456486 4690 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 11 14:19:37 crc kubenswrapper[4690]: I1211 14:19:37.605207 4690 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 11 14:19:37 crc kubenswrapper[4690]: I1211 14:19:37.627914 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 11 14:19:37 crc kubenswrapper[4690]: I1211 14:19:37.717046 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 11 14:19:38 crc kubenswrapper[4690]: I1211 14:19:38.109531 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 11 14:19:38 crc kubenswrapper[4690]: I1211 14:19:38.332871 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 11 14:19:38 crc kubenswrapper[4690]: I1211 14:19:38.506440 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 11 14:19:38 crc kubenswrapper[4690]: I1211 14:19:38.525939 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 11 14:19:38 crc kubenswrapper[4690]: I1211 14:19:38.641903 4690 scope.go:117] "RemoveContainer" containerID="974c350cb459faf7af6175a3f71d9bb8bb2664a9e8df96966cb6d1792306b097" Dec 11 14:19:38 crc kubenswrapper[4690]: I1211 14:19:38.953184 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 11 14:19:38 crc kubenswrapper[4690]: I1211 14:19:38.970041 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 11 14:19:39 crc kubenswrapper[4690]: I1211 14:19:39.070475 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 11 14:19:39 crc kubenswrapper[4690]: I1211 14:19:39.259521 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 11 14:19:39 crc kubenswrapper[4690]: I1211 14:19:39.312206 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 11 14:19:39 crc kubenswrapper[4690]: I1211 14:19:39.333461 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 11 14:19:39 crc kubenswrapper[4690]: I1211 14:19:39.358620 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 11 14:19:39 crc kubenswrapper[4690]: I1211 14:19:39.431613 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 11 14:19:39 crc kubenswrapper[4690]: I1211 14:19:39.896916 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 11 14:19:40 crc kubenswrapper[4690]: I1211 14:19:40.070075 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 11 14:19:40 crc kubenswrapper[4690]: 
I1211 14:19:40.167533 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 11 14:19:40 crc kubenswrapper[4690]: I1211 14:19:40.238511 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 11 14:19:40 crc kubenswrapper[4690]: I1211 14:19:40.279754 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 11 14:19:40 crc kubenswrapper[4690]: I1211 14:19:40.723578 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 11 14:19:40 crc kubenswrapper[4690]: I1211 14:19:40.894700 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 11 14:19:40 crc kubenswrapper[4690]: I1211 14:19:40.904308 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 11 14:19:41 crc kubenswrapper[4690]: I1211 14:19:41.562638 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 11 14:19:41 crc kubenswrapper[4690]: I1211 14:19:41.597502 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 11 14:19:41 crc kubenswrapper[4690]: I1211 14:19:41.785553 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 11 14:19:41 crc kubenswrapper[4690]: I1211 14:19:41.865724 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 11 14:19:41 crc kubenswrapper[4690]: I1211 14:19:41.980574 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 11 14:19:42 crc kubenswrapper[4690]: I1211 14:19:42.222760 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 11 14:19:42 crc kubenswrapper[4690]: I1211 14:19:42.289052 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 11 14:19:42 crc kubenswrapper[4690]: I1211 14:19:42.316615 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 11 14:19:42 crc kubenswrapper[4690]: I1211 14:19:42.341982 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 11 14:19:42 crc kubenswrapper[4690]: I1211 14:19:42.407471 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 11 14:19:42 crc kubenswrapper[4690]: I1211 14:19:42.645037 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 11 14:19:42 crc kubenswrapper[4690]: I1211 14:19:42.724660 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 11 14:19:42 crc kubenswrapper[4690]: I1211 14:19:42.728001 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 11 14:19:43 crc kubenswrapper[4690]: I1211 14:19:43.315514 4690 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 11 14:19:43 crc kubenswrapper[4690]: I1211 14:19:43.349304 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 11 14:19:43 crc kubenswrapper[4690]: I1211 14:19:43.355681 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 11 14:19:43 crc kubenswrapper[4690]: I1211 14:19:43.418802 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 11 14:19:43 crc kubenswrapper[4690]: I1211 14:19:43.553743 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 11 14:19:43 crc kubenswrapper[4690]: I1211 14:19:43.591786 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 11 14:19:43 crc kubenswrapper[4690]: I1211 14:19:43.728068 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 11 14:19:43 crc kubenswrapper[4690]: I1211 14:19:43.745078 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 11 14:19:43 crc kubenswrapper[4690]: I1211 14:19:43.983533 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 11 14:19:43 crc kubenswrapper[4690]: I1211 14:19:43.991997 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 11 14:19:44 crc kubenswrapper[4690]: I1211 14:19:44.039756 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 11 14:19:44 crc kubenswrapper[4690]: I1211 14:19:44.074789 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 11 14:19:44 crc kubenswrapper[4690]: I1211 14:19:44.169291 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 11 14:19:44 crc kubenswrapper[4690]: I1211 14:19:44.180128 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 11 14:19:44 crc kubenswrapper[4690]: I1211 14:19:44.275630 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 11 14:19:44 crc kubenswrapper[4690]: I1211 14:19:44.433142 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 11 14:19:44 crc kubenswrapper[4690]: I1211 14:19:44.505012 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 11 14:19:44 crc kubenswrapper[4690]: I1211 14:19:44.535440 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 11 14:19:44 crc kubenswrapper[4690]: I1211 14:19:44.615927 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 11 14:19:44 crc kubenswrapper[4690]: I1211 14:19:44.686160 4690 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-oauth-apiserver"/"audit-1" Dec 11 14:19:44 crc kubenswrapper[4690]: I1211 14:19:44.835756 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 11 14:19:44 crc kubenswrapper[4690]: I1211 14:19:44.864464 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 11 14:19:44 crc kubenswrapper[4690]: I1211 14:19:44.926567 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 11 14:19:45 crc kubenswrapper[4690]: I1211 14:19:45.017306 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 11 14:19:45 crc kubenswrapper[4690]: I1211 14:19:45.064482 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 11 14:19:45 crc kubenswrapper[4690]: I1211 14:19:45.117718 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 11 14:19:45 crc kubenswrapper[4690]: I1211 14:19:45.157438 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 11 14:19:45 crc kubenswrapper[4690]: I1211 14:19:45.161688 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 11 14:19:45 crc kubenswrapper[4690]: I1211 14:19:45.213366 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 11 14:19:45 crc kubenswrapper[4690]: I1211 14:19:45.218800 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 11 14:19:45 crc kubenswrapper[4690]: I1211 14:19:45.356081 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 11 14:19:45 crc kubenswrapper[4690]: I1211 14:19:45.584238 4690 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 11 14:19:45 crc kubenswrapper[4690]: I1211 14:19:45.837540 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 11 14:19:45 crc kubenswrapper[4690]: I1211 14:19:45.868020 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 11 14:19:46 crc kubenswrapper[4690]: I1211 14:19:46.036583 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 11 14:19:46 crc kubenswrapper[4690]: I1211 14:19:46.065268 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 11 14:19:46 crc kubenswrapper[4690]: I1211 14:19:46.148398 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 11 14:19:46 crc kubenswrapper[4690]: I1211 14:19:46.241267 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 11 14:19:46 crc kubenswrapper[4690]: I1211 14:19:46.243136 4690 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 11 14:19:46 crc kubenswrapper[4690]: I1211 14:19:46.293721 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 11 14:19:46 crc kubenswrapper[4690]: I1211 14:19:46.341302 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 11 14:19:46 crc kubenswrapper[4690]: I1211 14:19:46.343827 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 11 14:19:46 crc kubenswrapper[4690]: I1211 14:19:46.427302 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 11 14:19:46 crc kubenswrapper[4690]: I1211 14:19:46.474134 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 11 14:19:46 crc kubenswrapper[4690]: I1211 14:19:46.744777 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 11 14:19:47 crc kubenswrapper[4690]: I1211 14:19:47.200196 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 11 14:19:47 crc kubenswrapper[4690]: I1211 14:19:47.308583 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 11 14:19:47 crc kubenswrapper[4690]: I1211 14:19:47.457782 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 11 14:19:47 crc kubenswrapper[4690]: I1211 14:19:47.872854 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 11 14:19:48 crc kubenswrapper[4690]: I1211 14:19:48.711112 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 11 14:19:48 crc kubenswrapper[4690]: I1211 14:19:48.769454 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 11 14:19:48 crc kubenswrapper[4690]: I1211 14:19:48.939325 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 11 14:19:48 crc kubenswrapper[4690]: I1211 14:19:48.991346 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 11 14:19:49 crc kubenswrapper[4690]: I1211 14:19:49.006052 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 11 14:19:49 crc kubenswrapper[4690]: I1211 14:19:49.071885 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 11 14:19:49 crc kubenswrapper[4690]: I1211 14:19:49.081683 4690 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 11 14:19:49 crc kubenswrapper[4690]: I1211 14:19:49.121003 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 11 14:19:49 crc kubenswrapper[4690]: I1211 14:19:49.142267 4690 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 11 14:19:49 crc kubenswrapper[4690]: I1211 14:19:49.266222 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 11 14:19:49 crc kubenswrapper[4690]: I1211 14:19:49.309457 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 11 14:19:49 crc kubenswrapper[4690]: I1211 14:19:49.362783 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 11 14:19:49 crc kubenswrapper[4690]: I1211 14:19:49.494090 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 11 14:19:49 crc kubenswrapper[4690]: I1211 14:19:49.618032 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 11 14:19:49 crc kubenswrapper[4690]: I1211 14:19:49.640862 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 11 14:19:49 crc kubenswrapper[4690]: I1211 14:19:49.834487 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 11 14:19:49 crc kubenswrapper[4690]: I1211 14:19:49.880266 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 11 14:19:49 crc kubenswrapper[4690]: I1211 14:19:49.953495 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 11 14:19:49 crc kubenswrapper[4690]: I1211 14:19:49.963363 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 11 14:19:49 crc kubenswrapper[4690]: I1211 14:19:49.972907 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 11 14:19:50 crc kubenswrapper[4690]: I1211 14:19:50.104873 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 11 14:19:50 crc kubenswrapper[4690]: I1211 14:19:50.449307 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 11 14:19:50 crc kubenswrapper[4690]: I1211 14:19:50.678883 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 11 14:19:50 crc kubenswrapper[4690]: I1211 14:19:50.696395 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 11 14:19:50 crc kubenswrapper[4690]: I1211 14:19:50.731089 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 11 14:19:50 crc kubenswrapper[4690]: I1211 14:19:50.938338 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 11 14:19:51 crc kubenswrapper[4690]: I1211 14:19:51.109741 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 11 14:19:51 crc kubenswrapper[4690]: I1211 14:19:51.186135 
4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 11 14:19:51 crc kubenswrapper[4690]: I1211 14:19:51.375112 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-7bxfp_203500e9-a305-42cd-9909-d64ca944d363/marketplace-operator/2.log" Dec 11 14:19:51 crc kubenswrapper[4690]: I1211 14:19:51.375308 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" event={"ID":"203500e9-a305-42cd-9909-d64ca944d363","Type":"ContainerStarted","Data":"42361ffa2d485df801601f895479e9e86226c0a59290f43f33ac926cebb723bf"} Dec 11 14:19:51 crc kubenswrapper[4690]: I1211 14:19:51.400877 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 11 14:19:51 crc kubenswrapper[4690]: I1211 14:19:51.516503 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 11 14:19:51 crc kubenswrapper[4690]: I1211 14:19:51.594357 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 11 14:19:51 crc kubenswrapper[4690]: I1211 14:19:51.731447 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 11 14:19:51 crc kubenswrapper[4690]: I1211 14:19:51.738637 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 11 14:19:51 crc kubenswrapper[4690]: I1211 14:19:51.842481 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 11 14:19:52 crc kubenswrapper[4690]: I1211 14:19:52.351717 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 11 14:19:52 crc kubenswrapper[4690]: I1211 14:19:52.380667 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:19:52 crc kubenswrapper[4690]: I1211 14:19:52.385290 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" Dec 11 14:19:52 crc kubenswrapper[4690]: I1211 14:19:52.554898 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 11 14:19:52 crc kubenswrapper[4690]: I1211 14:19:52.578255 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 11 14:19:52 crc kubenswrapper[4690]: I1211 14:19:52.848746 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 11 14:19:53 crc kubenswrapper[4690]: I1211 14:19:53.417426 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 11 14:19:53 crc kubenswrapper[4690]: I1211 14:19:53.480353 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 11 14:19:53 crc kubenswrapper[4690]: I1211 14:19:53.568912 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 11 14:19:53 crc kubenswrapper[4690]: 
I1211 14:19:53.656392 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 11 14:19:53 crc kubenswrapper[4690]: I1211 14:19:53.734711 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 11 14:19:53 crc kubenswrapper[4690]: I1211 14:19:53.790752 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 11 14:19:53 crc kubenswrapper[4690]: I1211 14:19:53.880265 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 11 14:19:54 crc kubenswrapper[4690]: I1211 14:19:54.145880 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 11 14:19:54 crc kubenswrapper[4690]: I1211 14:19:54.404730 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 11 14:19:54 crc kubenswrapper[4690]: I1211 14:19:54.483796 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 11 14:19:54 crc kubenswrapper[4690]: I1211 14:19:54.571993 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 11 14:19:55 crc kubenswrapper[4690]: I1211 14:19:55.080314 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 11 14:19:55 crc kubenswrapper[4690]: I1211 14:19:55.175862 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 11 14:19:55 crc kubenswrapper[4690]: I1211 14:19:55.522196 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 11 14:19:55 crc kubenswrapper[4690]: I1211 14:19:55.644185 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 11 14:19:55 crc kubenswrapper[4690]: I1211 14:19:55.873635 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 11 14:19:55 crc kubenswrapper[4690]: I1211 14:19:55.951685 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.105613 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.676606 4690 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.699372 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.707234 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.948125 4690 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.951382 4690 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=94.951363213 podStartE2EDuration="1m34.951363213s" podCreationTimestamp="2025-12-11 14:18:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:18:52.076872113 +0000 UTC m=+263.692273776" watchObservedRunningTime="2025-12-11 14:19:56.951363213 +0000 UTC m=+328.566764856" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.951684 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-7bxfp" podStartSLOduration=93.951679691 podStartE2EDuration="1m33.951679691s" podCreationTimestamp="2025-12-11 14:18:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:18:55.062436527 +0000 UTC m=+266.677838170" watchObservedRunningTime="2025-12-11 14:19:56.951679691 +0000 UTC m=+328.567081334" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.953061 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-9wx9g","openshift-kube-apiserver/kube-apiserver-crc","openshift-marketplace/certified-operators-fs97w","openshift-marketplace/redhat-marketplace-r7754","openshift-marketplace/redhat-operators-24xfd","openshift-marketplace/certified-operators-djmxn","openshift-marketplace/marketplace-operator-79b997595-r2jbn","openshift-marketplace/community-operators-kmjlt","openshift-marketplace/community-operators-8bfnc","openshift-marketplace/redhat-marketplace-k2lrt","openshift-marketplace/redhat-operators-xq68s"] Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.953152 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-5477954dc8-92kmf","openshift-kube-apiserver/kube-apiserver-crc"] Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.953778 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.953805 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.953819 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" containerName="extract-utilities" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.953828 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" containerName="extract-utilities" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.953837 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" containerName="marketplace-operator" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.953846 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" containerName="marketplace-operator" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.953855 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" containerName="extract-utilities" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.953862 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" 
containerName="extract-utilities" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.953872 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" containerName="installer" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.953879 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" containerName="installer" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.953891 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.953898 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.953908 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" containerName="extract-content" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.953916 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" containerName="extract-content" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.953926 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" containerName="extract-utilities" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.953933 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" containerName="extract-utilities" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.953944 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.953969 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.953981 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" containerName="extract-utilities" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.953989 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" containerName="extract-utilities" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.953997 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" containerName="extract-content" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954005 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" containerName="extract-content" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.954015 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954021 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.954030 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" containerName="extract-utilities" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954037 4690 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" containerName="extract-utilities" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.954047 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" containerName="extract-utilities" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954055 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" containerName="extract-utilities" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.954065 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" containerName="extract-content" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954072 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" containerName="extract-content" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.954084 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" containerName="extract-content" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954091 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" containerName="extract-content" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.954101 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" containerName="extract-utilities" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954107 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" containerName="extract-utilities" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.954116 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" containerName="extract-content" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954123 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" containerName="extract-content" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.954133 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954140 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.954149 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="787bb691-ec3b-4dad-868a-3dcd2c33f4e1" containerName="oauth-openshift" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954156 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="787bb691-ec3b-4dad-868a-3dcd2c33f4e1" containerName="oauth-openshift" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.954165 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" containerName="extract-content" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954172 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" containerName="extract-content" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.954182 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" containerName="extract-content" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954189 4690 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" containerName="extract-content" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.954198 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954206 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.954216 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" containerName="extract-content" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954223 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" containerName="extract-content" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.954230 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954237 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.954244 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" containerName="extract-utilities" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954250 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" containerName="extract-utilities" Dec 11 14:19:56 crc kubenswrapper[4690]: E1211 14:19:56.954256 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954262 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954356 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954365 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954375 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954387 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954396 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="787bb691-ec3b-4dad-868a-3dcd2c33f4e1" containerName="oauth-openshift" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954408 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="64df140d-3126-42f9-b4d2-a3488a27fb57" containerName="installer" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954418 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: 
I1211 14:19:56.954425 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" containerName="marketplace-operator" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954435 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954447 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954456 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" containerName="registry-server" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.954812 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7bxfp"] Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.955175 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.957875 4690 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ca93a1ee-ec99-4255-9ae2-a987cf127929" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.957899 4690 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="ca93a1ee-ec99-4255-9ae2-a987cf127929" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.958852 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.959213 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.959635 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.960098 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.960264 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.960398 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.960510 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.960651 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.961557 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.962864 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.963349 4690 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.964127 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.966679 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.968377 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.970984 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.977837 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 11 14:19:56 crc kubenswrapper[4690]: I1211 14:19:56.996871 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=64.996846918 podStartE2EDuration="1m4.996846918s" podCreationTimestamp="2025-12-11 14:18:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:19:56.99002586 +0000 UTC m=+328.605427543" watchObservedRunningTime="2025-12-11 14:19:56.996846918 +0000 UTC m=+328.612248561" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.025069 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.105453 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.150288 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-service-ca\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.150353 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-user-template-error\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.150377 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrfs2\" (UniqueName: \"kubernetes.io/projected/7ce92cfe-52a8-4111-a076-a34419f97a1f-kube-api-access-zrfs2\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.150402 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-session\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.150583 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-router-certs\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.150756 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.150811 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7ce92cfe-52a8-4111-a076-a34419f97a1f-audit-dir\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.150861 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7ce92cfe-52a8-4111-a076-a34419f97a1f-audit-policies\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.150903 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.151060 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.151122 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.151148 4690 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.151178 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-user-template-login\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.151203 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.242461 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.253066 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.253149 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7ce92cfe-52a8-4111-a076-a34419f97a1f-audit-dir\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.253183 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7ce92cfe-52a8-4111-a076-a34419f97a1f-audit-policies\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.253212 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.253238 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-serving-cert\") pod 
\"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.253257 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.253278 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.253300 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-user-template-login\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.253329 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.253360 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-service-ca\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.253385 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-user-template-error\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.253406 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrfs2\" (UniqueName: \"kubernetes.io/projected/7ce92cfe-52a8-4111-a076-a34419f97a1f-kube-api-access-zrfs2\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.253431 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-session\") 
pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.253456 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-router-certs\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.253564 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7ce92cfe-52a8-4111-a076-a34419f97a1f-audit-dir\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.254191 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7ce92cfe-52a8-4111-a076-a34419f97a1f-audit-policies\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.255223 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.255709 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-service-ca\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.258114 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.260570 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.261140 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " 
pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.261606 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-user-template-login\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.261922 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-router-certs\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.266258 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.270519 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-system-session\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.270599 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-user-template-error\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.270896 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7ce92cfe-52a8-4111-a076-a34419f97a1f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.273720 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrfs2\" (UniqueName: \"kubernetes.io/projected/7ce92cfe-52a8-4111-a076-a34419f97a1f-kube-api-access-zrfs2\") pod \"oauth-openshift-5477954dc8-92kmf\" (UID: \"7ce92cfe-52a8-4111-a076-a34419f97a1f\") " pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.284763 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.420551 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.487345 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-5477954dc8-92kmf"] Dec 11 14:19:57 crc kubenswrapper[4690]: W1211 14:19:57.495788 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ce92cfe_52a8_4111_a076_a34419f97a1f.slice/crio-255f5ab00df7f103fd4a8e58b8fe78362cd015aef9384acc06e8f72938c1a138 WatchSource:0}: Error finding container 255f5ab00df7f103fd4a8e58b8fe78362cd015aef9384acc06e8f72938c1a138: Status 404 returned error can't find the container with id 255f5ab00df7f103fd4a8e58b8fe78362cd015aef9384acc06e8f72938c1a138 Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.650198 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.650265 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:19:57 crc kubenswrapper[4690]: I1211 14:19:57.657084 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:19:58 crc kubenswrapper[4690]: I1211 14:19:58.412717 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" event={"ID":"7ce92cfe-52a8-4111-a076-a34419f97a1f","Type":"ContainerStarted","Data":"255f5ab00df7f103fd4a8e58b8fe78362cd015aef9384acc06e8f72938c1a138"} Dec 11 14:19:58 crc kubenswrapper[4690]: I1211 14:19:58.418116 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 11 14:19:58 crc kubenswrapper[4690]: I1211 14:19:58.641005 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00f7dbc0-5b6f-4f74-a4e4-43759758be95" path="/var/lib/kubelet/pods/00f7dbc0-5b6f-4f74-a4e4-43759758be95/volumes" Dec 11 14:19:58 crc kubenswrapper[4690]: I1211 14:19:58.641692 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09e8430b-2226-4933-9dcf-ee3b5de076c3" path="/var/lib/kubelet/pods/09e8430b-2226-4933-9dcf-ee3b5de076c3/volumes" Dec 11 14:19:58 crc kubenswrapper[4690]: I1211 14:19:58.642300 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c61312d-523c-44a8-a451-dc96bba0f6d7" path="/var/lib/kubelet/pods/0c61312d-523c-44a8-a451-dc96bba0f6d7/volumes" Dec 11 14:19:58 crc kubenswrapper[4690]: I1211 14:19:58.643588 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="213bdee3-5fb6-4221-819e-43ec7a01f555" path="/var/lib/kubelet/pods/213bdee3-5fb6-4221-819e-43ec7a01f555/volumes" Dec 11 14:19:58 crc kubenswrapper[4690]: I1211 14:19:58.644747 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="646a2a29-480a-4725-9407-80d8a4f2a4bb" path="/var/lib/kubelet/pods/646a2a29-480a-4725-9407-80d8a4f2a4bb/volumes" Dec 11 14:19:58 crc kubenswrapper[4690]: I1211 14:19:58.646369 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="787bb691-ec3b-4dad-868a-3dcd2c33f4e1" 
path="/var/lib/kubelet/pods/787bb691-ec3b-4dad-868a-3dcd2c33f4e1/volumes" Dec 11 14:19:58 crc kubenswrapper[4690]: I1211 14:19:58.652710 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cd40416-92d2-41ec-b6ae-ba668ccc5685" path="/var/lib/kubelet/pods/8cd40416-92d2-41ec-b6ae-ba668ccc5685/volumes" Dec 11 14:19:58 crc kubenswrapper[4690]: I1211 14:19:58.654088 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="907b18d4-d2b5-47c9-9c70-716bd64330ae" path="/var/lib/kubelet/pods/907b18d4-d2b5-47c9-9c70-716bd64330ae/volumes" Dec 11 14:19:58 crc kubenswrapper[4690]: I1211 14:19:58.654845 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9fca9021-9a9f-4d5d-9892-37f39c580323" path="/var/lib/kubelet/pods/9fca9021-9a9f-4d5d-9892-37f39c580323/volumes" Dec 11 14:19:58 crc kubenswrapper[4690]: I1211 14:19:58.656684 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffb685f2-c2fc-4602-8d81-5f11b6581f29" path="/var/lib/kubelet/pods/ffb685f2-c2fc-4602-8d81-5f11b6581f29/volumes" Dec 11 14:19:58 crc kubenswrapper[4690]: I1211 14:19:58.723571 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 11 14:19:59 crc kubenswrapper[4690]: I1211 14:19:59.135678 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 11 14:19:59 crc kubenswrapper[4690]: I1211 14:19:59.421656 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" event={"ID":"7ce92cfe-52a8-4111-a076-a34419f97a1f","Type":"ContainerStarted","Data":"7d732ac40da0929afaca32ac51baacf985fafc8c0315dcd2c0dcc74ee464ddae"} Dec 11 14:20:00 crc kubenswrapper[4690]: I1211 14:20:00.426194 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 11 14:20:00 crc kubenswrapper[4690]: I1211 14:20:00.428051 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:20:00 crc kubenswrapper[4690]: I1211 14:20:00.433122 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" Dec 11 14:20:00 crc kubenswrapper[4690]: I1211 14:20:00.446675 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-5477954dc8-92kmf" podStartSLOduration=102.446650326 podStartE2EDuration="1m42.446650326s" podCreationTimestamp="2025-12-11 14:18:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:20:00.44637981 +0000 UTC m=+332.061781463" watchObservedRunningTime="2025-12-11 14:20:00.446650326 +0000 UTC m=+332.062051969" Dec 11 14:20:00 crc kubenswrapper[4690]: I1211 14:20:00.659549 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 11 14:20:00 crc kubenswrapper[4690]: I1211 14:20:00.671252 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 11 14:20:02 crc kubenswrapper[4690]: I1211 14:20:02.274573 4690 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 11 14:20:02 crc kubenswrapper[4690]: I1211 
14:20:02.275121 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://cecb33951fbd0bfbe17accfbc31acffae97524780d80b25f53c162d2c45ab45b" gracePeriod=5 Dec 11 14:20:03 crc kubenswrapper[4690]: I1211 14:20:03.308627 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.205198 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.205914 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.337048 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.337507 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.337656 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.337849 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.338129 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.337194 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.337754 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.337763 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.337873 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.346573 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.439030 4690 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.439059 4690 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.439069 4690 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.439076 4690 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.439084 4690 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.588988 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.589063 4690 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="cecb33951fbd0bfbe17accfbc31acffae97524780d80b25f53c162d2c45ab45b" exitCode=137 Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.589114 4690 scope.go:117] "RemoveContainer" containerID="cecb33951fbd0bfbe17accfbc31acffae97524780d80b25f53c162d2c45ab45b" Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.637168 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 11 14:20:12 crc 
kubenswrapper[4690]: I1211 14:20:12.637462 4690 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.646531 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.646793 4690 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="2796bf44-e967-4310-95b1-34aaf1b4b736" Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.649852 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 11 14:20:12 crc kubenswrapper[4690]: I1211 14:20:12.649888 4690 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="2796bf44-e967-4310-95b1-34aaf1b4b736" Dec 11 14:20:13 crc kubenswrapper[4690]: I1211 14:20:13.594688 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 11 14:20:25 crc kubenswrapper[4690]: I1211 14:20:25.860812 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-v47hs"] Dec 11 14:20:25 crc kubenswrapper[4690]: I1211 14:20:25.861737 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" podUID="5d3409f3-3730-479c-b48b-e3829fba88ae" containerName="controller-manager" containerID="cri-o://6bbae4252e68e79c9afbefcce64d7d54e93478db36e805d5149ad35acb7d2a8d" gracePeriod=30 Dec 11 14:20:25 crc kubenswrapper[4690]: I1211 14:20:25.967300 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc"] Dec 11 14:20:25 crc kubenswrapper[4690]: I1211 14:20:25.967495 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" podUID="2e221469-9c10-4c08-925b-a4e4e4c3d208" containerName="route-controller-manager" containerID="cri-o://ada26d4bd244788c3df86e07bb4be161eeb5c23df33915a51c3859e5760187cc" gracePeriod=30 Dec 11 14:20:29 crc kubenswrapper[4690]: I1211 14:20:29.673653 4690 generic.go:334] "Generic (PLEG): container finished" podID="5d3409f3-3730-479c-b48b-e3829fba88ae" containerID="6bbae4252e68e79c9afbefcce64d7d54e93478db36e805d5149ad35acb7d2a8d" exitCode=0 Dec 11 14:20:29 crc kubenswrapper[4690]: I1211 14:20:29.673739 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" event={"ID":"5d3409f3-3730-479c-b48b-e3829fba88ae","Type":"ContainerDied","Data":"6bbae4252e68e79c9afbefcce64d7d54e93478db36e805d5149ad35acb7d2a8d"} Dec 11 14:20:30 crc kubenswrapper[4690]: I1211 14:20:30.681056 4690 generic.go:334] "Generic (PLEG): container finished" podID="2e221469-9c10-4c08-925b-a4e4e4c3d208" containerID="ada26d4bd244788c3df86e07bb4be161eeb5c23df33915a51c3859e5760187cc" exitCode=0 Dec 11 14:20:30 crc kubenswrapper[4690]: I1211 14:20:30.681361 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" 
event={"ID":"2e221469-9c10-4c08-925b-a4e4e4c3d208","Type":"ContainerDied","Data":"ada26d4bd244788c3df86e07bb4be161eeb5c23df33915a51c3859e5760187cc"} Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.035812 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.042123 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.167922 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-config\") pod \"5d3409f3-3730-479c-b48b-e3829fba88ae\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.168015 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2e221469-9c10-4c08-925b-a4e4e4c3d208-client-ca\") pod \"2e221469-9c10-4c08-925b-a4e4e4c3d208\" (UID: \"2e221469-9c10-4c08-925b-a4e4e4c3d208\") " Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.168093 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d6fz\" (UniqueName: \"kubernetes.io/projected/2e221469-9c10-4c08-925b-a4e4e4c3d208-kube-api-access-4d6fz\") pod \"2e221469-9c10-4c08-925b-a4e4e4c3d208\" (UID: \"2e221469-9c10-4c08-925b-a4e4e4c3d208\") " Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.168117 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-proxy-ca-bundles\") pod \"5d3409f3-3730-479c-b48b-e3829fba88ae\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.168136 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e221469-9c10-4c08-925b-a4e4e4c3d208-config\") pod \"2e221469-9c10-4c08-925b-a4e4e4c3d208\" (UID: \"2e221469-9c10-4c08-925b-a4e4e4c3d208\") " Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.168167 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d3409f3-3730-479c-b48b-e3829fba88ae-serving-cert\") pod \"5d3409f3-3730-479c-b48b-e3829fba88ae\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.168190 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-client-ca\") pod \"5d3409f3-3730-479c-b48b-e3829fba88ae\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.168230 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z52cs\" (UniqueName: \"kubernetes.io/projected/5d3409f3-3730-479c-b48b-e3829fba88ae-kube-api-access-z52cs\") pod \"5d3409f3-3730-479c-b48b-e3829fba88ae\" (UID: \"5d3409f3-3730-479c-b48b-e3829fba88ae\") " Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.168251 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e221469-9c10-4c08-925b-a4e4e4c3d208-serving-cert\") pod \"2e221469-9c10-4c08-925b-a4e4e4c3d208\" (UID: \"2e221469-9c10-4c08-925b-a4e4e4c3d208\") " Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.170064 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-config" (OuterVolumeSpecName: "config") pod "5d3409f3-3730-479c-b48b-e3829fba88ae" (UID: "5d3409f3-3730-479c-b48b-e3829fba88ae"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.170064 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e221469-9c10-4c08-925b-a4e4e4c3d208-config" (OuterVolumeSpecName: "config") pod "2e221469-9c10-4c08-925b-a4e4e4c3d208" (UID: "2e221469-9c10-4c08-925b-a4e4e4c3d208"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.170328 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-client-ca" (OuterVolumeSpecName: "client-ca") pod "5d3409f3-3730-479c-b48b-e3829fba88ae" (UID: "5d3409f3-3730-479c-b48b-e3829fba88ae"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.170379 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "5d3409f3-3730-479c-b48b-e3829fba88ae" (UID: "5d3409f3-3730-479c-b48b-e3829fba88ae"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.170524 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2e221469-9c10-4c08-925b-a4e4e4c3d208-client-ca" (OuterVolumeSpecName: "client-ca") pod "2e221469-9c10-4c08-925b-a4e4e4c3d208" (UID: "2e221469-9c10-4c08-925b-a4e4e4c3d208"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.180690 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d3409f3-3730-479c-b48b-e3829fba88ae-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5d3409f3-3730-479c-b48b-e3829fba88ae" (UID: "5d3409f3-3730-479c-b48b-e3829fba88ae"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.182300 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2e221469-9c10-4c08-925b-a4e4e4c3d208-kube-api-access-4d6fz" (OuterVolumeSpecName: "kube-api-access-4d6fz") pod "2e221469-9c10-4c08-925b-a4e4e4c3d208" (UID: "2e221469-9c10-4c08-925b-a4e4e4c3d208"). InnerVolumeSpecName "kube-api-access-4d6fz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.187931 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2e221469-9c10-4c08-925b-a4e4e4c3d208-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "2e221469-9c10-4c08-925b-a4e4e4c3d208" (UID: "2e221469-9c10-4c08-925b-a4e4e4c3d208"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.189533 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d3409f3-3730-479c-b48b-e3829fba88ae-kube-api-access-z52cs" (OuterVolumeSpecName: "kube-api-access-z52cs") pod "5d3409f3-3730-479c-b48b-e3829fba88ae" (UID: "5d3409f3-3730-479c-b48b-e3829fba88ae"). InnerVolumeSpecName "kube-api-access-z52cs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.269905 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d6fz\" (UniqueName: \"kubernetes.io/projected/2e221469-9c10-4c08-925b-a4e4e4c3d208-kube-api-access-4d6fz\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.269962 4690 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.269972 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2e221469-9c10-4c08-925b-a4e4e4c3d208-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.269980 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d3409f3-3730-479c-b48b-e3829fba88ae-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.269989 4690 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.269996 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z52cs\" (UniqueName: \"kubernetes.io/projected/5d3409f3-3730-479c-b48b-e3829fba88ae-kube-api-access-z52cs\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.270004 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2e221469-9c10-4c08-925b-a4e4e4c3d208-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.270011 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d3409f3-3730-479c-b48b-e3829fba88ae-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.270021 4690 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2e221469-9c10-4c08-925b-a4e4e4c3d208-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.698316 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" 
event={"ID":"5d3409f3-3730-479c-b48b-e3829fba88ae","Type":"ContainerDied","Data":"b86206ff18872cc877ee2fefd617a8e82840a31d2cf6e105a75f3f1ed773cb81"} Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.698410 4690 scope.go:117] "RemoveContainer" containerID="6bbae4252e68e79c9afbefcce64d7d54e93478db36e805d5149ad35acb7d2a8d" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.698396 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-v47hs" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.700256 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" event={"ID":"2e221469-9c10-4c08-925b-a4e4e4c3d208","Type":"ContainerDied","Data":"9187434550edb9bebc3f9c98a2c856ba23dc2d496eeb57b1838eb0afd616b96e"} Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.700315 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.719674 4690 scope.go:117] "RemoveContainer" containerID="ada26d4bd244788c3df86e07bb4be161eeb5c23df33915a51c3859e5760187cc" Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.732995 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc"] Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.735800 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-tlndc"] Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.746714 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-v47hs"] Dec 11 14:20:31 crc kubenswrapper[4690]: I1211 14:20:31.751483 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-v47hs"] Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.620580 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-67d56fb549-rxxtm"] Dec 11 14:20:32 crc kubenswrapper[4690]: E1211 14:20:32.620834 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d3409f3-3730-479c-b48b-e3829fba88ae" containerName="controller-manager" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.620853 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d3409f3-3730-479c-b48b-e3829fba88ae" containerName="controller-manager" Dec 11 14:20:32 crc kubenswrapper[4690]: E1211 14:20:32.620868 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.620877 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 11 14:20:32 crc kubenswrapper[4690]: E1211 14:20:32.620898 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2e221469-9c10-4c08-925b-a4e4e4c3d208" containerName="route-controller-manager" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.620907 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="2e221469-9c10-4c08-925b-a4e4e4c3d208" containerName="route-controller-manager" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.621060 4690 
memory_manager.go:354] "RemoveStaleState removing state" podUID="2e221469-9c10-4c08-925b-a4e4e4c3d208" containerName="route-controller-manager" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.621077 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d3409f3-3730-479c-b48b-e3829fba88ae" containerName="controller-manager" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.621088 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.621504 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.623317 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.623486 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.625970 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.626008 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.626125 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.626570 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.634881 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.637631 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2e221469-9c10-4c08-925b-a4e4e4c3d208" path="/var/lib/kubelet/pods/2e221469-9c10-4c08-925b-a4e4e4c3d208/volumes" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.638160 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d3409f3-3730-479c-b48b-e3829fba88ae" path="/var/lib/kubelet/pods/5d3409f3-3730-479c-b48b-e3829fba88ae/volumes" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.641640 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-67d56fb549-rxxtm"] Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.789264 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8876d8ce-85f0-4018-89ab-53fc5578fd05-client-ca\") pod \"controller-manager-67d56fb549-rxxtm\" (UID: \"8876d8ce-85f0-4018-89ab-53fc5578fd05\") " pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.789339 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9bzl\" (UniqueName: \"kubernetes.io/projected/8876d8ce-85f0-4018-89ab-53fc5578fd05-kube-api-access-v9bzl\") pod \"controller-manager-67d56fb549-rxxtm\" (UID: 
\"8876d8ce-85f0-4018-89ab-53fc5578fd05\") " pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.789458 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8876d8ce-85f0-4018-89ab-53fc5578fd05-serving-cert\") pod \"controller-manager-67d56fb549-rxxtm\" (UID: \"8876d8ce-85f0-4018-89ab-53fc5578fd05\") " pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.789516 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8876d8ce-85f0-4018-89ab-53fc5578fd05-config\") pod \"controller-manager-67d56fb549-rxxtm\" (UID: \"8876d8ce-85f0-4018-89ab-53fc5578fd05\") " pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.789609 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8876d8ce-85f0-4018-89ab-53fc5578fd05-proxy-ca-bundles\") pod \"controller-manager-67d56fb549-rxxtm\" (UID: \"8876d8ce-85f0-4018-89ab-53fc5578fd05\") " pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.890436 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8876d8ce-85f0-4018-89ab-53fc5578fd05-proxy-ca-bundles\") pod \"controller-manager-67d56fb549-rxxtm\" (UID: \"8876d8ce-85f0-4018-89ab-53fc5578fd05\") " pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.890501 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8876d8ce-85f0-4018-89ab-53fc5578fd05-client-ca\") pod \"controller-manager-67d56fb549-rxxtm\" (UID: \"8876d8ce-85f0-4018-89ab-53fc5578fd05\") " pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.890544 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9bzl\" (UniqueName: \"kubernetes.io/projected/8876d8ce-85f0-4018-89ab-53fc5578fd05-kube-api-access-v9bzl\") pod \"controller-manager-67d56fb549-rxxtm\" (UID: \"8876d8ce-85f0-4018-89ab-53fc5578fd05\") " pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.890569 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8876d8ce-85f0-4018-89ab-53fc5578fd05-serving-cert\") pod \"controller-manager-67d56fb549-rxxtm\" (UID: \"8876d8ce-85f0-4018-89ab-53fc5578fd05\") " pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.890588 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8876d8ce-85f0-4018-89ab-53fc5578fd05-config\") pod \"controller-manager-67d56fb549-rxxtm\" (UID: \"8876d8ce-85f0-4018-89ab-53fc5578fd05\") " pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:32 crc kubenswrapper[4690]: 
I1211 14:20:32.891669 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8876d8ce-85f0-4018-89ab-53fc5578fd05-client-ca\") pod \"controller-manager-67d56fb549-rxxtm\" (UID: \"8876d8ce-85f0-4018-89ab-53fc5578fd05\") " pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.891682 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8876d8ce-85f0-4018-89ab-53fc5578fd05-proxy-ca-bundles\") pod \"controller-manager-67d56fb549-rxxtm\" (UID: \"8876d8ce-85f0-4018-89ab-53fc5578fd05\") " pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.892000 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8876d8ce-85f0-4018-89ab-53fc5578fd05-config\") pod \"controller-manager-67d56fb549-rxxtm\" (UID: \"8876d8ce-85f0-4018-89ab-53fc5578fd05\") " pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.901196 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8876d8ce-85f0-4018-89ab-53fc5578fd05-serving-cert\") pod \"controller-manager-67d56fb549-rxxtm\" (UID: \"8876d8ce-85f0-4018-89ab-53fc5578fd05\") " pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.907142 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9bzl\" (UniqueName: \"kubernetes.io/projected/8876d8ce-85f0-4018-89ab-53fc5578fd05-kube-api-access-v9bzl\") pod \"controller-manager-67d56fb549-rxxtm\" (UID: \"8876d8ce-85f0-4018-89ab-53fc5578fd05\") " pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:32 crc kubenswrapper[4690]: I1211 14:20:32.935592 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.176301 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-67d56fb549-rxxtm"] Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.622032 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp"] Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.622626 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.624522 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.624973 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.625068 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.625198 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.625346 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.625424 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.633851 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp"] Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.699746 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chm8g\" (UniqueName: \"kubernetes.io/projected/7cd166a7-c2fb-4523-a8fb-cff2498632b3-kube-api-access-chm8g\") pod \"route-controller-manager-675dc77d8c-sqnwp\" (UID: \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\") " pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.699815 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cd166a7-c2fb-4523-a8fb-cff2498632b3-config\") pod \"route-controller-manager-675dc77d8c-sqnwp\" (UID: \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\") " pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.699862 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7cd166a7-c2fb-4523-a8fb-cff2498632b3-client-ca\") pod \"route-controller-manager-675dc77d8c-sqnwp\" (UID: \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\") " pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.699930 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7cd166a7-c2fb-4523-a8fb-cff2498632b3-serving-cert\") pod \"route-controller-manager-675dc77d8c-sqnwp\" (UID: \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\") " pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.711451 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" 
event={"ID":"8876d8ce-85f0-4018-89ab-53fc5578fd05","Type":"ContainerStarted","Data":"44f988a93ae710369ff61b9d87e69078148432cf3d107b1d53986934c07b8a90"} Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.801432 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chm8g\" (UniqueName: \"kubernetes.io/projected/7cd166a7-c2fb-4523-a8fb-cff2498632b3-kube-api-access-chm8g\") pod \"route-controller-manager-675dc77d8c-sqnwp\" (UID: \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\") " pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.803084 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cd166a7-c2fb-4523-a8fb-cff2498632b3-config\") pod \"route-controller-manager-675dc77d8c-sqnwp\" (UID: \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\") " pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.803289 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7cd166a7-c2fb-4523-a8fb-cff2498632b3-client-ca\") pod \"route-controller-manager-675dc77d8c-sqnwp\" (UID: \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\") " pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.803684 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7cd166a7-c2fb-4523-a8fb-cff2498632b3-serving-cert\") pod \"route-controller-manager-675dc77d8c-sqnwp\" (UID: \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\") " pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.804239 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7cd166a7-c2fb-4523-a8fb-cff2498632b3-client-ca\") pod \"route-controller-manager-675dc77d8c-sqnwp\" (UID: \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\") " pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.804349 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cd166a7-c2fb-4523-a8fb-cff2498632b3-config\") pod \"route-controller-manager-675dc77d8c-sqnwp\" (UID: \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\") " pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.810123 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7cd166a7-c2fb-4523-a8fb-cff2498632b3-serving-cert\") pod \"route-controller-manager-675dc77d8c-sqnwp\" (UID: \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\") " pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.820868 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chm8g\" (UniqueName: \"kubernetes.io/projected/7cd166a7-c2fb-4523-a8fb-cff2498632b3-kube-api-access-chm8g\") pod \"route-controller-manager-675dc77d8c-sqnwp\" (UID: \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\") " 
pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:33 crc kubenswrapper[4690]: I1211 14:20:33.989533 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:34 crc kubenswrapper[4690]: I1211 14:20:34.175695 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp"] Dec 11 14:20:34 crc kubenswrapper[4690]: W1211 14:20:34.183733 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7cd166a7_c2fb_4523_a8fb_cff2498632b3.slice/crio-cc441496440c696c2061d360c7f69da9065ad9f251cc76311ed050c58b6b35a0 WatchSource:0}: Error finding container cc441496440c696c2061d360c7f69da9065ad9f251cc76311ed050c58b6b35a0: Status 404 returned error can't find the container with id cc441496440c696c2061d360c7f69da9065ad9f251cc76311ed050c58b6b35a0 Dec 11 14:20:34 crc kubenswrapper[4690]: I1211 14:20:34.716989 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" event={"ID":"7cd166a7-c2fb-4523-a8fb-cff2498632b3","Type":"ContainerStarted","Data":"cc441496440c696c2061d360c7f69da9065ad9f251cc76311ed050c58b6b35a0"} Dec 11 14:20:34 crc kubenswrapper[4690]: I1211 14:20:34.718437 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" event={"ID":"8876d8ce-85f0-4018-89ab-53fc5578fd05","Type":"ContainerStarted","Data":"152ea321189fbb7bbae7bba984388728655f336c7a543f056ce6a81d517dded5"} Dec 11 14:20:35 crc kubenswrapper[4690]: I1211 14:20:35.724799 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" event={"ID":"7cd166a7-c2fb-4523-a8fb-cff2498632b3","Type":"ContainerStarted","Data":"57f5ffa3e18227b7c03d1b1c35931b8cee7cb67b7a46fad3eddeb60fc3eb85eb"} Dec 11 14:20:35 crc kubenswrapper[4690]: I1211 14:20:35.725380 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:35 crc kubenswrapper[4690]: I1211 14:20:35.729723 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" Dec 11 14:20:35 crc kubenswrapper[4690]: I1211 14:20:35.743808 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-67d56fb549-rxxtm" podStartSLOduration=5.743792936 podStartE2EDuration="5.743792936s" podCreationTimestamp="2025-12-11 14:20:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:20:35.741966643 +0000 UTC m=+367.357368286" watchObservedRunningTime="2025-12-11 14:20:35.743792936 +0000 UTC m=+367.359194579" Dec 11 14:20:36 crc kubenswrapper[4690]: I1211 14:20:36.730121 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:36 crc kubenswrapper[4690]: I1211 14:20:36.735286 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:36 crc 
kubenswrapper[4690]: I1211 14:20:36.750722 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" podStartSLOduration=8.750697228 podStartE2EDuration="8.750697228s" podCreationTimestamp="2025-12-11 14:20:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:20:36.749530511 +0000 UTC m=+368.364932174" watchObservedRunningTime="2025-12-11 14:20:36.750697228 +0000 UTC m=+368.366098871" Dec 11 14:20:45 crc kubenswrapper[4690]: I1211 14:20:45.860487 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp"] Dec 11 14:20:45 crc kubenswrapper[4690]: I1211 14:20:45.861287 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" podUID="7cd166a7-c2fb-4523-a8fb-cff2498632b3" containerName="route-controller-manager" containerID="cri-o://57f5ffa3e18227b7c03d1b1c35931b8cee7cb67b7a46fad3eddeb60fc3eb85eb" gracePeriod=30 Dec 11 14:20:47 crc kubenswrapper[4690]: I1211 14:20:47.783142 4690 generic.go:334] "Generic (PLEG): container finished" podID="7cd166a7-c2fb-4523-a8fb-cff2498632b3" containerID="57f5ffa3e18227b7c03d1b1c35931b8cee7cb67b7a46fad3eddeb60fc3eb85eb" exitCode=0 Dec 11 14:20:47 crc kubenswrapper[4690]: I1211 14:20:47.783248 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" event={"ID":"7cd166a7-c2fb-4523-a8fb-cff2498632b3","Type":"ContainerDied","Data":"57f5ffa3e18227b7c03d1b1c35931b8cee7cb67b7a46fad3eddeb60fc3eb85eb"} Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.322210 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.352321 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp"] Dec 11 14:20:48 crc kubenswrapper[4690]: E1211 14:20:48.352578 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7cd166a7-c2fb-4523-a8fb-cff2498632b3" containerName="route-controller-manager" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.352595 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="7cd166a7-c2fb-4523-a8fb-cff2498632b3" containerName="route-controller-manager" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.352717 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="7cd166a7-c2fb-4523-a8fb-cff2498632b3" containerName="route-controller-manager" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.353199 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.359716 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp"] Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.477580 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chm8g\" (UniqueName: \"kubernetes.io/projected/7cd166a7-c2fb-4523-a8fb-cff2498632b3-kube-api-access-chm8g\") pod \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\" (UID: \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\") " Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.477696 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cd166a7-c2fb-4523-a8fb-cff2498632b3-config\") pod \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\" (UID: \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\") " Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.477737 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7cd166a7-c2fb-4523-a8fb-cff2498632b3-client-ca\") pod \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\" (UID: \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\") " Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.477787 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7cd166a7-c2fb-4523-a8fb-cff2498632b3-serving-cert\") pod \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\" (UID: \"7cd166a7-c2fb-4523-a8fb-cff2498632b3\") " Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.478122 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0e740d1f-312b-4c98-becf-169176429108-client-ca\") pod \"route-controller-manager-6d78679667-hshtp\" (UID: \"0e740d1f-312b-4c98-becf-169176429108\") " pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.478195 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdtfq\" (UniqueName: \"kubernetes.io/projected/0e740d1f-312b-4c98-becf-169176429108-kube-api-access-qdtfq\") pod \"route-controller-manager-6d78679667-hshtp\" (UID: \"0e740d1f-312b-4c98-becf-169176429108\") " pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.478254 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e740d1f-312b-4c98-becf-169176429108-config\") pod \"route-controller-manager-6d78679667-hshtp\" (UID: \"0e740d1f-312b-4c98-becf-169176429108\") " pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.478356 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e740d1f-312b-4c98-becf-169176429108-serving-cert\") pod \"route-controller-manager-6d78679667-hshtp\" (UID: \"0e740d1f-312b-4c98-becf-169176429108\") " pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" Dec 11 14:20:48 crc 
kubenswrapper[4690]: I1211 14:20:48.478666 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7cd166a7-c2fb-4523-a8fb-cff2498632b3-client-ca" (OuterVolumeSpecName: "client-ca") pod "7cd166a7-c2fb-4523-a8fb-cff2498632b3" (UID: "7cd166a7-c2fb-4523-a8fb-cff2498632b3"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.478710 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7cd166a7-c2fb-4523-a8fb-cff2498632b3-config" (OuterVolumeSpecName: "config") pod "7cd166a7-c2fb-4523-a8fb-cff2498632b3" (UID: "7cd166a7-c2fb-4523-a8fb-cff2498632b3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.483320 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7cd166a7-c2fb-4523-a8fb-cff2498632b3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7cd166a7-c2fb-4523-a8fb-cff2498632b3" (UID: "7cd166a7-c2fb-4523-a8fb-cff2498632b3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.483699 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7cd166a7-c2fb-4523-a8fb-cff2498632b3-kube-api-access-chm8g" (OuterVolumeSpecName: "kube-api-access-chm8g") pod "7cd166a7-c2fb-4523-a8fb-cff2498632b3" (UID: "7cd166a7-c2fb-4523-a8fb-cff2498632b3"). InnerVolumeSpecName "kube-api-access-chm8g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.579011 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e740d1f-312b-4c98-becf-169176429108-config\") pod \"route-controller-manager-6d78679667-hshtp\" (UID: \"0e740d1f-312b-4c98-becf-169176429108\") " pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.579077 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e740d1f-312b-4c98-becf-169176429108-serving-cert\") pod \"route-controller-manager-6d78679667-hshtp\" (UID: \"0e740d1f-312b-4c98-becf-169176429108\") " pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.579115 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0e740d1f-312b-4c98-becf-169176429108-client-ca\") pod \"route-controller-manager-6d78679667-hshtp\" (UID: \"0e740d1f-312b-4c98-becf-169176429108\") " pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.579140 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdtfq\" (UniqueName: \"kubernetes.io/projected/0e740d1f-312b-4c98-becf-169176429108-kube-api-access-qdtfq\") pod \"route-controller-manager-6d78679667-hshtp\" (UID: \"0e740d1f-312b-4c98-becf-169176429108\") " pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.579194 4690 reconciler_common.go:293] 
"Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7cd166a7-c2fb-4523-a8fb-cff2498632b3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.579208 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chm8g\" (UniqueName: \"kubernetes.io/projected/7cd166a7-c2fb-4523-a8fb-cff2498632b3-kube-api-access-chm8g\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.579220 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7cd166a7-c2fb-4523-a8fb-cff2498632b3-config\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.579229 4690 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7cd166a7-c2fb-4523-a8fb-cff2498632b3-client-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.580593 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0e740d1f-312b-4c98-becf-169176429108-client-ca\") pod \"route-controller-manager-6d78679667-hshtp\" (UID: \"0e740d1f-312b-4c98-becf-169176429108\") " pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.580891 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e740d1f-312b-4c98-becf-169176429108-config\") pod \"route-controller-manager-6d78679667-hshtp\" (UID: \"0e740d1f-312b-4c98-becf-169176429108\") " pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.585636 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0e740d1f-312b-4c98-becf-169176429108-serving-cert\") pod \"route-controller-manager-6d78679667-hshtp\" (UID: \"0e740d1f-312b-4c98-becf-169176429108\") " pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.595323 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdtfq\" (UniqueName: \"kubernetes.io/projected/0e740d1f-312b-4c98-becf-169176429108-kube-api-access-qdtfq\") pod \"route-controller-manager-6d78679667-hshtp\" (UID: \"0e740d1f-312b-4c98-becf-169176429108\") " pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.671883 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.792595 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" event={"ID":"7cd166a7-c2fb-4523-a8fb-cff2498632b3","Type":"ContainerDied","Data":"cc441496440c696c2061d360c7f69da9065ad9f251cc76311ed050c58b6b35a0"} Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.792664 4690 scope.go:117] "RemoveContainer" containerID="57f5ffa3e18227b7c03d1b1c35931b8cee7cb67b7a46fad3eddeb60fc3eb85eb" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.792667 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp" Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.806503 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp"] Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.812207 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-675dc77d8c-sqnwp"] Dec 11 14:20:48 crc kubenswrapper[4690]: I1211 14:20:48.922117 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp"] Dec 11 14:20:49 crc kubenswrapper[4690]: I1211 14:20:49.801849 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" event={"ID":"0e740d1f-312b-4c98-becf-169176429108","Type":"ContainerStarted","Data":"a1382d71ae76a0763c297eb26417e92538b29a62baffa85eb9c5c32f0a712b9b"} Dec 11 14:20:49 crc kubenswrapper[4690]: I1211 14:20:49.801989 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" Dec 11 14:20:49 crc kubenswrapper[4690]: I1211 14:20:49.802009 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" event={"ID":"0e740d1f-312b-4c98-becf-169176429108","Type":"ContainerStarted","Data":"93a0e343f805d2a244a9d8fb10c3cbddf548d3bedb22c4f6f70e3a3841094e69"} Dec 11 14:20:49 crc kubenswrapper[4690]: I1211 14:20:49.817433 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" Dec 11 14:20:49 crc kubenswrapper[4690]: I1211 14:20:49.823314 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6d78679667-hshtp" podStartSLOduration=4.823292189 podStartE2EDuration="4.823292189s" podCreationTimestamp="2025-12-11 14:20:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:20:49.820550008 +0000 UTC m=+381.435951661" watchObservedRunningTime="2025-12-11 14:20:49.823292189 +0000 UTC m=+381.438693842" Dec 11 14:20:50 crc kubenswrapper[4690]: I1211 14:20:50.639438 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7cd166a7-c2fb-4523-a8fb-cff2498632b3" path="/var/lib/kubelet/pods/7cd166a7-c2fb-4523-a8fb-cff2498632b3/volumes" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.262060 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-x6l5z"] Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.263943 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.320919 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-x6l5z"] Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.395697 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9d1be960-4b58-4faa-b240-fcbf37f5c38e-registry-tls\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.395753 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9d1be960-4b58-4faa-b240-fcbf37f5c38e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.395798 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kh28t\" (UniqueName: \"kubernetes.io/projected/9d1be960-4b58-4faa-b240-fcbf37f5c38e-kube-api-access-kh28t\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.395827 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d1be960-4b58-4faa-b240-fcbf37f5c38e-trusted-ca\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.395863 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9d1be960-4b58-4faa-b240-fcbf37f5c38e-registry-certificates\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.395916 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9d1be960-4b58-4faa-b240-fcbf37f5c38e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.396000 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.396031 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/9d1be960-4b58-4faa-b240-fcbf37f5c38e-bound-sa-token\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.414128 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.497082 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9d1be960-4b58-4faa-b240-fcbf37f5c38e-registry-certificates\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.497135 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9d1be960-4b58-4faa-b240-fcbf37f5c38e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.497159 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9d1be960-4b58-4faa-b240-fcbf37f5c38e-bound-sa-token\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.497184 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9d1be960-4b58-4faa-b240-fcbf37f5c38e-registry-tls\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.497204 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9d1be960-4b58-4faa-b240-fcbf37f5c38e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.497232 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kh28t\" (UniqueName: \"kubernetes.io/projected/9d1be960-4b58-4faa-b240-fcbf37f5c38e-kube-api-access-kh28t\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.497252 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d1be960-4b58-4faa-b240-fcbf37f5c38e-trusted-ca\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.498419 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d1be960-4b58-4faa-b240-fcbf37f5c38e-trusted-ca\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.498498 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/9d1be960-4b58-4faa-b240-fcbf37f5c38e-registry-certificates\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.498617 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/9d1be960-4b58-4faa-b240-fcbf37f5c38e-ca-trust-extracted\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.503280 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/9d1be960-4b58-4faa-b240-fcbf37f5c38e-registry-tls\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.503377 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/9d1be960-4b58-4faa-b240-fcbf37f5c38e-installation-pull-secrets\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.512262 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/9d1be960-4b58-4faa-b240-fcbf37f5c38e-bound-sa-token\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.516234 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kh28t\" (UniqueName: \"kubernetes.io/projected/9d1be960-4b58-4faa-b240-fcbf37f5c38e-kube-api-access-kh28t\") pod \"image-registry-66df7c8f76-x6l5z\" (UID: \"9d1be960-4b58-4faa-b240-fcbf37f5c38e\") " pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.578781 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.627533 4690 patch_prober.go:28] interesting pod/machine-config-daemon-z9662 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.627583 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z9662" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.812213 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-x6l5z"] Dec 11 14:21:04 crc kubenswrapper[4690]: W1211 14:21:04.815396 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d1be960_4b58_4faa_b240_fcbf37f5c38e.slice/crio-68c34c3deb8bbd31edd1beff891b8d4325b1efc5832c5db25697176b7a51d7be WatchSource:0}: Error finding container 68c34c3deb8bbd31edd1beff891b8d4325b1efc5832c5db25697176b7a51d7be: Status 404 returned error can't find the container with id 68c34c3deb8bbd31edd1beff891b8d4325b1efc5832c5db25697176b7a51d7be Dec 11 14:21:04 crc kubenswrapper[4690]: I1211 14:21:04.882757 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" event={"ID":"9d1be960-4b58-4faa-b240-fcbf37f5c38e","Type":"ContainerStarted","Data":"68c34c3deb8bbd31edd1beff891b8d4325b1efc5832c5db25697176b7a51d7be"} Dec 11 14:21:06 crc kubenswrapper[4690]: I1211 14:21:06.894852 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" event={"ID":"9d1be960-4b58-4faa-b240-fcbf37f5c38e","Type":"ContainerStarted","Data":"4e8d4e60166df254521079030e0d15b86614e6142a28e4f8d798f1b212019ff2"} Dec 11 14:21:06 crc kubenswrapper[4690]: I1211 14:21:06.895232 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:06 crc kubenswrapper[4690]: I1211 14:21:06.920306 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" podStartSLOduration=2.920287231 podStartE2EDuration="2.920287231s" podCreationTimestamp="2025-12-11 14:21:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-11 14:21:06.92026193 +0000 UTC m=+398.535663583" watchObservedRunningTime="2025-12-11 14:21:06.920287231 +0000 UTC m=+398.535688874" Dec 11 14:21:24 crc kubenswrapper[4690]: I1211 14:21:24.584172 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-x6l5z" Dec 11 14:21:24 crc kubenswrapper[4690]: I1211 14:21:24.659542 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-c5zwr"] Dec 11 14:21:29 crc kubenswrapper[4690]: I1211 14:21:29.260195 4690 scope.go:117] "RemoveContainer" 
containerID="cf7e1f734f99c12908234fbcb5581753a863e60d4b305ad4599bf66401789e60" Dec 11 14:21:30 crc kubenswrapper[4690]: I1211 14:21:30.996231 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vggvs"] Dec 11 14:21:30 crc kubenswrapper[4690]: I1211 14:21:30.998411 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vggvs" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.000725 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.009481 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vggvs"] Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.143024 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a453fee0-bbb3-46c4-9715-f346d20af283-catalog-content\") pod \"certified-operators-vggvs\" (UID: \"a453fee0-bbb3-46c4-9715-f346d20af283\") " pod="openshift-marketplace/certified-operators-vggvs" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.143093 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a453fee0-bbb3-46c4-9715-f346d20af283-utilities\") pod \"certified-operators-vggvs\" (UID: \"a453fee0-bbb3-46c4-9715-f346d20af283\") " pod="openshift-marketplace/certified-operators-vggvs" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.143135 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzh54\" (UniqueName: \"kubernetes.io/projected/a453fee0-bbb3-46c4-9715-f346d20af283-kube-api-access-qzh54\") pod \"certified-operators-vggvs\" (UID: \"a453fee0-bbb3-46c4-9715-f346d20af283\") " pod="openshift-marketplace/certified-operators-vggvs" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.185097 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rhblg"] Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.186402 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rhblg" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.188737 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.196813 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rhblg"] Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.244032 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35634005-97b7-4311-ac27-f3604cad23a8-catalog-content\") pod \"community-operators-rhblg\" (UID: \"35634005-97b7-4311-ac27-f3604cad23a8\") " pod="openshift-marketplace/community-operators-rhblg" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.244084 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljwzz\" (UniqueName: \"kubernetes.io/projected/35634005-97b7-4311-ac27-f3604cad23a8-kube-api-access-ljwzz\") pod \"community-operators-rhblg\" (UID: \"35634005-97b7-4311-ac27-f3604cad23a8\") " pod="openshift-marketplace/community-operators-rhblg" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.244114 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a453fee0-bbb3-46c4-9715-f346d20af283-catalog-content\") pod \"certified-operators-vggvs\" (UID: \"a453fee0-bbb3-46c4-9715-f346d20af283\") " pod="openshift-marketplace/certified-operators-vggvs" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.244183 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a453fee0-bbb3-46c4-9715-f346d20af283-utilities\") pod \"certified-operators-vggvs\" (UID: \"a453fee0-bbb3-46c4-9715-f346d20af283\") " pod="openshift-marketplace/certified-operators-vggvs" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.244222 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzh54\" (UniqueName: \"kubernetes.io/projected/a453fee0-bbb3-46c4-9715-f346d20af283-kube-api-access-qzh54\") pod \"certified-operators-vggvs\" (UID: \"a453fee0-bbb3-46c4-9715-f346d20af283\") " pod="openshift-marketplace/certified-operators-vggvs" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.244253 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35634005-97b7-4311-ac27-f3604cad23a8-utilities\") pod \"community-operators-rhblg\" (UID: \"35634005-97b7-4311-ac27-f3604cad23a8\") " pod="openshift-marketplace/community-operators-rhblg" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.244781 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a453fee0-bbb3-46c4-9715-f346d20af283-catalog-content\") pod \"certified-operators-vggvs\" (UID: \"a453fee0-bbb3-46c4-9715-f346d20af283\") " pod="openshift-marketplace/certified-operators-vggvs" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.244841 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a453fee0-bbb3-46c4-9715-f346d20af283-utilities\") pod \"certified-operators-vggvs\" (UID: 
\"a453fee0-bbb3-46c4-9715-f346d20af283\") " pod="openshift-marketplace/certified-operators-vggvs" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.265182 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzh54\" (UniqueName: \"kubernetes.io/projected/a453fee0-bbb3-46c4-9715-f346d20af283-kube-api-access-qzh54\") pod \"certified-operators-vggvs\" (UID: \"a453fee0-bbb3-46c4-9715-f346d20af283\") " pod="openshift-marketplace/certified-operators-vggvs" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.321818 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vggvs" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.345825 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35634005-97b7-4311-ac27-f3604cad23a8-utilities\") pod \"community-operators-rhblg\" (UID: \"35634005-97b7-4311-ac27-f3604cad23a8\") " pod="openshift-marketplace/community-operators-rhblg" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.346378 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35634005-97b7-4311-ac27-f3604cad23a8-catalog-content\") pod \"community-operators-rhblg\" (UID: \"35634005-97b7-4311-ac27-f3604cad23a8\") " pod="openshift-marketplace/community-operators-rhblg" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.346411 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljwzz\" (UniqueName: \"kubernetes.io/projected/35634005-97b7-4311-ac27-f3604cad23a8-kube-api-access-ljwzz\") pod \"community-operators-rhblg\" (UID: \"35634005-97b7-4311-ac27-f3604cad23a8\") " pod="openshift-marketplace/community-operators-rhblg" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.346314 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35634005-97b7-4311-ac27-f3604cad23a8-utilities\") pod \"community-operators-rhblg\" (UID: \"35634005-97b7-4311-ac27-f3604cad23a8\") " pod="openshift-marketplace/community-operators-rhblg" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.346889 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35634005-97b7-4311-ac27-f3604cad23a8-catalog-content\") pod \"community-operators-rhblg\" (UID: \"35634005-97b7-4311-ac27-f3604cad23a8\") " pod="openshift-marketplace/community-operators-rhblg" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.364360 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljwzz\" (UniqueName: \"kubernetes.io/projected/35634005-97b7-4311-ac27-f3604cad23a8-kube-api-access-ljwzz\") pod \"community-operators-rhblg\" (UID: \"35634005-97b7-4311-ac27-f3604cad23a8\") " pod="openshift-marketplace/community-operators-rhblg" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.500628 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rhblg" Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.703613 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vggvs"] Dec 11 14:21:31 crc kubenswrapper[4690]: W1211 14:21:31.705538 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda453fee0_bbb3_46c4_9715_f346d20af283.slice/crio-4c02df6e79afedeaf8b2298a4b31f20e530414e2b918c114013d2d50eeb47015 WatchSource:0}: Error finding container 4c02df6e79afedeaf8b2298a4b31f20e530414e2b918c114013d2d50eeb47015: Status 404 returned error can't find the container with id 4c02df6e79afedeaf8b2298a4b31f20e530414e2b918c114013d2d50eeb47015 Dec 11 14:21:31 crc kubenswrapper[4690]: I1211 14:21:31.903275 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rhblg"] Dec 11 14:21:31 crc kubenswrapper[4690]: W1211 14:21:31.908757 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35634005_97b7_4311_ac27_f3604cad23a8.slice/crio-b1cb2fdf4ab163d790a9092da4747c290119d6210dd45b87211dd86fd831c282 WatchSource:0}: Error finding container b1cb2fdf4ab163d790a9092da4747c290119d6210dd45b87211dd86fd831c282: Status 404 returned error can't find the container with id b1cb2fdf4ab163d790a9092da4747c290119d6210dd45b87211dd86fd831c282 Dec 11 14:21:32 crc kubenswrapper[4690]: I1211 14:21:32.028717 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vggvs" event={"ID":"a453fee0-bbb3-46c4-9715-f346d20af283","Type":"ContainerStarted","Data":"4c02df6e79afedeaf8b2298a4b31f20e530414e2b918c114013d2d50eeb47015"} Dec 11 14:21:32 crc kubenswrapper[4690]: I1211 14:21:32.030233 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rhblg" event={"ID":"35634005-97b7-4311-ac27-f3604cad23a8","Type":"ContainerStarted","Data":"b1cb2fdf4ab163d790a9092da4747c290119d6210dd45b87211dd86fd831c282"} Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.385708 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-x8gc9"] Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.386971 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x8gc9" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.389074 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.393190 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x8gc9"] Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.483630 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/278cc4a4-c5e0-49c9-ac6c-27951b716160-catalog-content\") pod \"redhat-marketplace-x8gc9\" (UID: \"278cc4a4-c5e0-49c9-ac6c-27951b716160\") " pod="openshift-marketplace/redhat-marketplace-x8gc9" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.483739 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/278cc4a4-c5e0-49c9-ac6c-27951b716160-utilities\") pod \"redhat-marketplace-x8gc9\" (UID: \"278cc4a4-c5e0-49c9-ac6c-27951b716160\") " pod="openshift-marketplace/redhat-marketplace-x8gc9" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.483766 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pq84j\" (UniqueName: \"kubernetes.io/projected/278cc4a4-c5e0-49c9-ac6c-27951b716160-kube-api-access-pq84j\") pod \"redhat-marketplace-x8gc9\" (UID: \"278cc4a4-c5e0-49c9-ac6c-27951b716160\") " pod="openshift-marketplace/redhat-marketplace-x8gc9" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.585577 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/278cc4a4-c5e0-49c9-ac6c-27951b716160-catalog-content\") pod \"redhat-marketplace-x8gc9\" (UID: \"278cc4a4-c5e0-49c9-ac6c-27951b716160\") " pod="openshift-marketplace/redhat-marketplace-x8gc9" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.585636 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/278cc4a4-c5e0-49c9-ac6c-27951b716160-utilities\") pod \"redhat-marketplace-x8gc9\" (UID: \"278cc4a4-c5e0-49c9-ac6c-27951b716160\") " pod="openshift-marketplace/redhat-marketplace-x8gc9" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.585678 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pq84j\" (UniqueName: \"kubernetes.io/projected/278cc4a4-c5e0-49c9-ac6c-27951b716160-kube-api-access-pq84j\") pod \"redhat-marketplace-x8gc9\" (UID: \"278cc4a4-c5e0-49c9-ac6c-27951b716160\") " pod="openshift-marketplace/redhat-marketplace-x8gc9" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.586088 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/278cc4a4-c5e0-49c9-ac6c-27951b716160-catalog-content\") pod \"redhat-marketplace-x8gc9\" (UID: \"278cc4a4-c5e0-49c9-ac6c-27951b716160\") " pod="openshift-marketplace/redhat-marketplace-x8gc9" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.586166 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/278cc4a4-c5e0-49c9-ac6c-27951b716160-utilities\") pod \"redhat-marketplace-x8gc9\" (UID: 
\"278cc4a4-c5e0-49c9-ac6c-27951b716160\") " pod="openshift-marketplace/redhat-marketplace-x8gc9" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.589994 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kmbqb"] Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.591008 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kmbqb" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.593823 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.597410 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kmbqb"] Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.616742 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pq84j\" (UniqueName: \"kubernetes.io/projected/278cc4a4-c5e0-49c9-ac6c-27951b716160-kube-api-access-pq84j\") pod \"redhat-marketplace-x8gc9\" (UID: \"278cc4a4-c5e0-49c9-ac6c-27951b716160\") " pod="openshift-marketplace/redhat-marketplace-x8gc9" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.687445 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/175f6674-4af9-4b35-aa85-c7d54c07abbe-catalog-content\") pod \"redhat-operators-kmbqb\" (UID: \"175f6674-4af9-4b35-aa85-c7d54c07abbe\") " pod="openshift-marketplace/redhat-operators-kmbqb" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.687973 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/175f6674-4af9-4b35-aa85-c7d54c07abbe-utilities\") pod \"redhat-operators-kmbqb\" (UID: \"175f6674-4af9-4b35-aa85-c7d54c07abbe\") " pod="openshift-marketplace/redhat-operators-kmbqb" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.688152 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74bwq\" (UniqueName: \"kubernetes.io/projected/175f6674-4af9-4b35-aa85-c7d54c07abbe-kube-api-access-74bwq\") pod \"redhat-operators-kmbqb\" (UID: \"175f6674-4af9-4b35-aa85-c7d54c07abbe\") " pod="openshift-marketplace/redhat-operators-kmbqb" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.739141 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x8gc9" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.791113 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74bwq\" (UniqueName: \"kubernetes.io/projected/175f6674-4af9-4b35-aa85-c7d54c07abbe-kube-api-access-74bwq\") pod \"redhat-operators-kmbqb\" (UID: \"175f6674-4af9-4b35-aa85-c7d54c07abbe\") " pod="openshift-marketplace/redhat-operators-kmbqb" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.792616 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/175f6674-4af9-4b35-aa85-c7d54c07abbe-catalog-content\") pod \"redhat-operators-kmbqb\" (UID: \"175f6674-4af9-4b35-aa85-c7d54c07abbe\") " pod="openshift-marketplace/redhat-operators-kmbqb" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.793003 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/175f6674-4af9-4b35-aa85-c7d54c07abbe-utilities\") pod \"redhat-operators-kmbqb\" (UID: \"175f6674-4af9-4b35-aa85-c7d54c07abbe\") " pod="openshift-marketplace/redhat-operators-kmbqb" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.793445 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/175f6674-4af9-4b35-aa85-c7d54c07abbe-catalog-content\") pod \"redhat-operators-kmbqb\" (UID: \"175f6674-4af9-4b35-aa85-c7d54c07abbe\") " pod="openshift-marketplace/redhat-operators-kmbqb" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.794125 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/175f6674-4af9-4b35-aa85-c7d54c07abbe-utilities\") pod \"redhat-operators-kmbqb\" (UID: \"175f6674-4af9-4b35-aa85-c7d54c07abbe\") " pod="openshift-marketplace/redhat-operators-kmbqb" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.810533 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74bwq\" (UniqueName: \"kubernetes.io/projected/175f6674-4af9-4b35-aa85-c7d54c07abbe-kube-api-access-74bwq\") pod \"redhat-operators-kmbqb\" (UID: \"175f6674-4af9-4b35-aa85-c7d54c07abbe\") " pod="openshift-marketplace/redhat-operators-kmbqb" Dec 11 14:21:33 crc kubenswrapper[4690]: I1211 14:21:33.916947 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kmbqb" Dec 11 14:21:34 crc kubenswrapper[4690]: I1211 14:21:34.047287 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vggvs" event={"ID":"a453fee0-bbb3-46c4-9715-f346d20af283","Type":"ContainerStarted","Data":"2be9e79d702936b428afa4f2d79a5a7cdfaec28688cdb6445ae9e01dd3693ebc"} Dec 11 14:21:34 crc kubenswrapper[4690]: I1211 14:21:34.115269 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x8gc9"] Dec 11 14:21:34 crc kubenswrapper[4690]: W1211 14:21:34.123159 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod278cc4a4_c5e0_49c9_ac6c_27951b716160.slice/crio-b440d727de1d31bf6a7663aa40180349d718a44a4e316c764d138cf5841bda1b WatchSource:0}: Error finding container b440d727de1d31bf6a7663aa40180349d718a44a4e316c764d138cf5841bda1b: Status 404 returned error can't find the container with id b440d727de1d31bf6a7663aa40180349d718a44a4e316c764d138cf5841bda1b Dec 11 14:21:34 crc kubenswrapper[4690]: I1211 14:21:34.310992 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kmbqb"] Dec 11 14:21:34 crc kubenswrapper[4690]: W1211 14:21:34.320931 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod175f6674_4af9_4b35_aa85_c7d54c07abbe.slice/crio-e5c52d8e203ca0a66a57ceb1d8226bf9deb09f1e1ba26d9a3d3cbbdc7054ce15 WatchSource:0}: Error finding container e5c52d8e203ca0a66a57ceb1d8226bf9deb09f1e1ba26d9a3d3cbbdc7054ce15: Status 404 returned error can't find the container with id e5c52d8e203ca0a66a57ceb1d8226bf9deb09f1e1ba26d9a3d3cbbdc7054ce15 Dec 11 14:21:34 crc kubenswrapper[4690]: I1211 14:21:34.627472 4690 patch_prober.go:28] interesting pod/machine-config-daemon-z9662 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 14:21:34 crc kubenswrapper[4690]: I1211 14:21:34.627527 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z9662" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 14:21:35 crc kubenswrapper[4690]: I1211 14:21:35.054401 4690 generic.go:334] "Generic (PLEG): container finished" podID="a453fee0-bbb3-46c4-9715-f346d20af283" containerID="2be9e79d702936b428afa4f2d79a5a7cdfaec28688cdb6445ae9e01dd3693ebc" exitCode=0 Dec 11 14:21:35 crc kubenswrapper[4690]: I1211 14:21:35.054479 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vggvs" event={"ID":"a453fee0-bbb3-46c4-9715-f346d20af283","Type":"ContainerDied","Data":"2be9e79d702936b428afa4f2d79a5a7cdfaec28688cdb6445ae9e01dd3693ebc"} Dec 11 14:21:35 crc kubenswrapper[4690]: I1211 14:21:35.055914 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8gc9" event={"ID":"278cc4a4-c5e0-49c9-ac6c-27951b716160","Type":"ContainerStarted","Data":"b440d727de1d31bf6a7663aa40180349d718a44a4e316c764d138cf5841bda1b"} Dec 11 14:21:35 crc kubenswrapper[4690]: I1211 14:21:35.057115 4690 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kmbqb" event={"ID":"175f6674-4af9-4b35-aa85-c7d54c07abbe","Type":"ContainerStarted","Data":"e5c52d8e203ca0a66a57ceb1d8226bf9deb09f1e1ba26d9a3d3cbbdc7054ce15"} Dec 11 14:21:35 crc kubenswrapper[4690]: I1211 14:21:35.058440 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rhblg" event={"ID":"35634005-97b7-4311-ac27-f3604cad23a8","Type":"ContainerStarted","Data":"fd85a95348c5e87836d73ff2a4b3d2596ab8c8f73343d2ea71782859540b6f0e"} Dec 11 14:21:36 crc kubenswrapper[4690]: I1211 14:21:36.070577 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kmbqb" event={"ID":"175f6674-4af9-4b35-aa85-c7d54c07abbe","Type":"ContainerStarted","Data":"590334db41cb2973b37d7137c15f7eff2337ac03d0de8858d65cd917527d4b26"} Dec 11 14:21:36 crc kubenswrapper[4690]: I1211 14:21:36.072030 4690 generic.go:334] "Generic (PLEG): container finished" podID="35634005-97b7-4311-ac27-f3604cad23a8" containerID="fd85a95348c5e87836d73ff2a4b3d2596ab8c8f73343d2ea71782859540b6f0e" exitCode=0 Dec 11 14:21:36 crc kubenswrapper[4690]: I1211 14:21:36.072077 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rhblg" event={"ID":"35634005-97b7-4311-ac27-f3604cad23a8","Type":"ContainerDied","Data":"fd85a95348c5e87836d73ff2a4b3d2596ab8c8f73343d2ea71782859540b6f0e"} Dec 11 14:21:36 crc kubenswrapper[4690]: I1211 14:21:36.075383 4690 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 11 14:21:37 crc kubenswrapper[4690]: I1211 14:21:37.079186 4690 generic.go:334] "Generic (PLEG): container finished" podID="278cc4a4-c5e0-49c9-ac6c-27951b716160" containerID="364ba9e2968dd6cf3a4bb7ef550e82a791ff91ff0cf3d5fdf6854932a5060ecb" exitCode=0 Dec 11 14:21:37 crc kubenswrapper[4690]: I1211 14:21:37.079255 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8gc9" event={"ID":"278cc4a4-c5e0-49c9-ac6c-27951b716160","Type":"ContainerDied","Data":"364ba9e2968dd6cf3a4bb7ef550e82a791ff91ff0cf3d5fdf6854932a5060ecb"} Dec 11 14:21:37 crc kubenswrapper[4690]: I1211 14:21:37.080876 4690 generic.go:334] "Generic (PLEG): container finished" podID="175f6674-4af9-4b35-aa85-c7d54c07abbe" containerID="590334db41cb2973b37d7137c15f7eff2337ac03d0de8858d65cd917527d4b26" exitCode=0 Dec 11 14:21:37 crc kubenswrapper[4690]: I1211 14:21:37.080981 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kmbqb" event={"ID":"175f6674-4af9-4b35-aa85-c7d54c07abbe","Type":"ContainerDied","Data":"590334db41cb2973b37d7137c15f7eff2337ac03d0de8858d65cd917527d4b26"} Dec 11 14:21:43 crc kubenswrapper[4690]: I1211 14:21:43.116067 4690 generic.go:334] "Generic (PLEG): container finished" podID="a453fee0-bbb3-46c4-9715-f346d20af283" containerID="7316c07a1a455915a24cb67a3adfab509f38cfd895ba746c5d941d4e944d2af2" exitCode=0 Dec 11 14:21:43 crc kubenswrapper[4690]: I1211 14:21:43.116640 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vggvs" event={"ID":"a453fee0-bbb3-46c4-9715-f346d20af283","Type":"ContainerDied","Data":"7316c07a1a455915a24cb67a3adfab509f38cfd895ba746c5d941d4e944d2af2"} Dec 11 14:21:43 crc kubenswrapper[4690]: I1211 14:21:43.128995 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rhblg" 
event={"ID":"35634005-97b7-4311-ac27-f3604cad23a8","Type":"ContainerStarted","Data":"830c78b7b52a9d0d22ef13e1050468d127ba2354c5ff5a3affdbf2569e6c49b3"} Dec 11 14:21:44 crc kubenswrapper[4690]: I1211 14:21:44.136452 4690 generic.go:334] "Generic (PLEG): container finished" podID="278cc4a4-c5e0-49c9-ac6c-27951b716160" containerID="96025ecf861541db894e6f60f16b4f048ecb26bbded00e76cfb77c18d9ddebda" exitCode=0 Dec 11 14:21:44 crc kubenswrapper[4690]: I1211 14:21:44.136529 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8gc9" event={"ID":"278cc4a4-c5e0-49c9-ac6c-27951b716160","Type":"ContainerDied","Data":"96025ecf861541db894e6f60f16b4f048ecb26bbded00e76cfb77c18d9ddebda"} Dec 11 14:21:44 crc kubenswrapper[4690]: I1211 14:21:44.138758 4690 generic.go:334] "Generic (PLEG): container finished" podID="175f6674-4af9-4b35-aa85-c7d54c07abbe" containerID="4c244aa82d27cffe84048d2753295b3b36c16992f08c90a0eae95a2ee99bdead" exitCode=0 Dec 11 14:21:44 crc kubenswrapper[4690]: I1211 14:21:44.138835 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kmbqb" event={"ID":"175f6674-4af9-4b35-aa85-c7d54c07abbe","Type":"ContainerDied","Data":"4c244aa82d27cffe84048d2753295b3b36c16992f08c90a0eae95a2ee99bdead"} Dec 11 14:21:44 crc kubenswrapper[4690]: I1211 14:21:44.154806 4690 generic.go:334] "Generic (PLEG): container finished" podID="35634005-97b7-4311-ac27-f3604cad23a8" containerID="830c78b7b52a9d0d22ef13e1050468d127ba2354c5ff5a3affdbf2569e6c49b3" exitCode=0 Dec 11 14:21:44 crc kubenswrapper[4690]: I1211 14:21:44.154912 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rhblg" event={"ID":"35634005-97b7-4311-ac27-f3604cad23a8","Type":"ContainerDied","Data":"830c78b7b52a9d0d22ef13e1050468d127ba2354c5ff5a3affdbf2569e6c49b3"} Dec 11 14:21:47 crc kubenswrapper[4690]: I1211 14:21:47.184128 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vggvs" event={"ID":"a453fee0-bbb3-46c4-9715-f346d20af283","Type":"ContainerStarted","Data":"91ac8307e3f4aa95f60ef04294a6f80186723428e6de0a98a0698d0cf9ff73f7"} Dec 11 14:21:49 crc kubenswrapper[4690]: I1211 14:21:49.697437 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" podUID="51f5a15c-718b-4daf-9ea9-9bdd4ed84f73" containerName="registry" containerID="cri-o://5ff716de2a7abceafdc159638b1f7ba7dd0951fec6093433c8143afb3c0f8ef8" gracePeriod=30 Dec 11 14:21:51 crc kubenswrapper[4690]: I1211 14:21:51.322819 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vggvs" Dec 11 14:21:51 crc kubenswrapper[4690]: I1211 14:21:51.323237 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vggvs" Dec 11 14:21:51 crc kubenswrapper[4690]: I1211 14:21:51.367004 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vggvs" Dec 11 14:21:51 crc kubenswrapper[4690]: I1211 14:21:51.386798 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vggvs" podStartSLOduration=11.931878483 podStartE2EDuration="21.386782551s" podCreationTimestamp="2025-12-11 14:21:30 +0000 UTC" firstStartedPulling="2025-12-11 14:21:36.07509694 +0000 UTC m=+427.690498583" 
lastFinishedPulling="2025-12-11 14:21:45.530001008 +0000 UTC m=+437.145402651" observedRunningTime="2025-12-11 14:21:48.209755576 +0000 UTC m=+439.825157219" watchObservedRunningTime="2025-12-11 14:21:51.386782551 +0000 UTC m=+443.002184194" Dec 11 14:21:52 crc kubenswrapper[4690]: I1211 14:21:52.247916 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vggvs" Dec 11 14:21:53 crc kubenswrapper[4690]: I1211 14:21:53.440098 4690 patch_prober.go:28] interesting pod/image-registry-697d97f7c8-c5zwr container/registry namespace/openshift-image-registry: Readiness probe status=failure output="Get \"https://10.217.0.20:5000/healthz\": dial tcp 10.217.0.20:5000: connect: connection refused" start-of-body= Dec 11 14:21:53 crc kubenswrapper[4690]: I1211 14:21:53.440754 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" podUID="51f5a15c-718b-4daf-9ea9-9bdd4ed84f73" containerName="registry" probeResult="failure" output="Get \"https://10.217.0.20:5000/healthz\": dial tcp 10.217.0.20:5000: connect: connection refused" Dec 11 14:21:58 crc kubenswrapper[4690]: I1211 14:21:58.913674 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-image-registry_image-registry-697d97f7c8-c5zwr_51f5a15c-718b-4daf-9ea9-9bdd4ed84f73/registry/0.log" Dec 11 14:21:58 crc kubenswrapper[4690]: I1211 14:21:58.915116 4690 generic.go:334] "Generic (PLEG): container finished" podID="51f5a15c-718b-4daf-9ea9-9bdd4ed84f73" containerID="5ff716de2a7abceafdc159638b1f7ba7dd0951fec6093433c8143afb3c0f8ef8" exitCode=-1 Dec 11 14:21:58 crc kubenswrapper[4690]: I1211 14:21:58.915346 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" event={"ID":"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73","Type":"ContainerDied","Data":"5ff716de2a7abceafdc159638b1f7ba7dd0951fec6093433c8143afb3c0f8ef8"} Dec 11 14:21:59 crc kubenswrapper[4690]: I1211 14:21:59.924437 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8gc9" event={"ID":"278cc4a4-c5e0-49c9-ac6c-27951b716160","Type":"ContainerStarted","Data":"4f833f6ae311ee1fc2d3590a3ccdff3fb6baf780d760c28ec4a3ec718c5b2a04"} Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.056273 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.080619 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-x8gc9" podStartSLOduration=17.923706058 podStartE2EDuration="28.080597296s" podCreationTimestamp="2025-12-11 14:21:33 +0000 UTC" firstStartedPulling="2025-12-11 14:21:38.088597791 +0000 UTC m=+429.703999434" lastFinishedPulling="2025-12-11 14:21:48.245489019 +0000 UTC m=+439.860890672" observedRunningTime="2025-12-11 14:22:00.950491142 +0000 UTC m=+452.565892835" watchObservedRunningTime="2025-12-11 14:22:01.080597296 +0000 UTC m=+452.695998939" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.150576 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-trusted-ca\") pod \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.150634 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-ca-trust-extracted\") pod \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.150776 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.150813 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c66d4\" (UniqueName: \"kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-kube-api-access-c66d4\") pod \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.150853 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-installation-pull-secrets\") pod \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.150881 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-bound-sa-token\") pod \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.150906 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-registry-certificates\") pod \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.150944 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-registry-tls\") pod 
\"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\" (UID: \"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73\") " Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.151611 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.151739 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.157583 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.157583 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.157911 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-kube-api-access-c66d4" (OuterVolumeSpecName: "kube-api-access-c66d4") pod "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73"). InnerVolumeSpecName "kube-api-access-c66d4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.158345 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.161448 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.174591 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73" (UID: "51f5a15c-718b-4daf-9ea9-9bdd4ed84f73"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.253034 4690 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.253083 4690 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.253096 4690 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.253110 4690 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.253124 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.253134 4690 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.253162 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c66d4\" (UniqueName: \"kubernetes.io/projected/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73-kube-api-access-c66d4\") on node \"crc\" DevicePath \"\"" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.936839 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" event={"ID":"51f5a15c-718b-4daf-9ea9-9bdd4ed84f73","Type":"ContainerDied","Data":"3996c2df1064bcbf630bc1def684fa83fc4a603103179d8de91a7a991c1040fe"} Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.936903 4690 scope.go:117] "RemoveContainer" containerID="5ff716de2a7abceafdc159638b1f7ba7dd0951fec6093433c8143afb3c0f8ef8" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.936923 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-c5zwr" Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.974316 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-c5zwr"] Dec 11 14:22:01 crc kubenswrapper[4690]: I1211 14:22:01.981171 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-c5zwr"] Dec 11 14:22:02 crc kubenswrapper[4690]: I1211 14:22:02.636853 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51f5a15c-718b-4daf-9ea9-9bdd4ed84f73" path="/var/lib/kubelet/pods/51f5a15c-718b-4daf-9ea9-9bdd4ed84f73/volumes" Dec 11 14:22:02 crc kubenswrapper[4690]: I1211 14:22:02.952509 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kmbqb" event={"ID":"175f6674-4af9-4b35-aa85-c7d54c07abbe","Type":"ContainerStarted","Data":"7182cd6445b1a0510e2a3fce1422e8cb48e0633701d7010df641c0bc3edf7d15"} Dec 11 14:22:02 crc kubenswrapper[4690]: I1211 14:22:02.954571 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rhblg" event={"ID":"35634005-97b7-4311-ac27-f3604cad23a8","Type":"ContainerStarted","Data":"b33335221c3748a99083586bc70b753031c0c35ef91785a71febe879b2ace93d"} Dec 11 14:22:02 crc kubenswrapper[4690]: I1211 14:22:02.971056 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kmbqb" podStartSLOduration=5.040533239 podStartE2EDuration="29.971033575s" podCreationTimestamp="2025-12-11 14:21:33 +0000 UTC" firstStartedPulling="2025-12-11 14:21:37.082689692 +0000 UTC m=+428.698091335" lastFinishedPulling="2025-12-11 14:22:02.013190008 +0000 UTC m=+453.628591671" observedRunningTime="2025-12-11 14:22:02.968442628 +0000 UTC m=+454.583844271" watchObservedRunningTime="2025-12-11 14:22:02.971033575 +0000 UTC m=+454.586435218" Dec 11 14:22:02 crc kubenswrapper[4690]: I1211 14:22:02.987506 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rhblg" podStartSLOduration=6.87393899 podStartE2EDuration="31.98748704s" podCreationTimestamp="2025-12-11 14:21:31 +0000 UTC" firstStartedPulling="2025-12-11 14:21:37.082659401 +0000 UTC m=+428.698061054" lastFinishedPulling="2025-12-11 14:22:02.196207461 +0000 UTC m=+453.811609104" observedRunningTime="2025-12-11 14:22:02.987157342 +0000 UTC m=+454.602558985" watchObservedRunningTime="2025-12-11 14:22:02.98748704 +0000 UTC m=+454.602888683" Dec 11 14:22:03 crc kubenswrapper[4690]: I1211 14:22:03.739923 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-x8gc9" Dec 11 14:22:03 crc kubenswrapper[4690]: I1211 14:22:03.740601 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-x8gc9" Dec 11 14:22:03 crc kubenswrapper[4690]: I1211 14:22:03.780870 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-x8gc9" Dec 11 14:22:03 crc kubenswrapper[4690]: I1211 14:22:03.917572 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kmbqb" Dec 11 14:22:03 crc kubenswrapper[4690]: I1211 14:22:03.917664 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kmbqb" Dec 11 
14:22:03 crc kubenswrapper[4690]: I1211 14:22:03.999520 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-x8gc9" Dec 11 14:22:04 crc kubenswrapper[4690]: I1211 14:22:04.627262 4690 patch_prober.go:28] interesting pod/machine-config-daemon-z9662 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 14:22:04 crc kubenswrapper[4690]: I1211 14:22:04.627319 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z9662" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 14:22:04 crc kubenswrapper[4690]: I1211 14:22:04.627363 4690 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-z9662" Dec 11 14:22:04 crc kubenswrapper[4690]: I1211 14:22:04.627899 4690 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1c80cd2a90322545319a0730a69c8f3377cd248081b58abd3a0b769926d09d16"} pod="openshift-machine-config-operator/machine-config-daemon-z9662" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 14:22:04 crc kubenswrapper[4690]: I1211 14:22:04.627968 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-z9662" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" containerName="machine-config-daemon" containerID="cri-o://1c80cd2a90322545319a0730a69c8f3377cd248081b58abd3a0b769926d09d16" gracePeriod=600 Dec 11 14:22:04 crc kubenswrapper[4690]: I1211 14:22:04.951595 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kmbqb" podUID="175f6674-4af9-4b35-aa85-c7d54c07abbe" containerName="registry-server" probeResult="failure" output=< Dec 11 14:22:04 crc kubenswrapper[4690]: timeout: failed to connect service ":50051" within 1s Dec 11 14:22:04 crc kubenswrapper[4690]: > Dec 11 14:22:05 crc kubenswrapper[4690]: I1211 14:22:05.971587 4690 generic.go:334] "Generic (PLEG): container finished" podID="44a7b31b-09dd-452b-87ba-29764eaa0206" containerID="1c80cd2a90322545319a0730a69c8f3377cd248081b58abd3a0b769926d09d16" exitCode=0 Dec 11 14:22:05 crc kubenswrapper[4690]: I1211 14:22:05.971688 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z9662" event={"ID":"44a7b31b-09dd-452b-87ba-29764eaa0206","Type":"ContainerDied","Data":"1c80cd2a90322545319a0730a69c8f3377cd248081b58abd3a0b769926d09d16"} Dec 11 14:22:05 crc kubenswrapper[4690]: I1211 14:22:05.972047 4690 scope.go:117] "RemoveContainer" containerID="0e4832b2b6dce8997cd247f1916ab6a197fa3f64006449b295e150efb8bd4eb7" Dec 11 14:22:08 crc kubenswrapper[4690]: I1211 14:22:08.989599 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z9662" event={"ID":"44a7b31b-09dd-452b-87ba-29764eaa0206","Type":"ContainerStarted","Data":"0e9f2eaec22188530b2e4b668101f76740db88621e2cdc541923a12bc2ca9d08"} Dec 11 14:22:11 crc kubenswrapper[4690]: I1211 14:22:11.501458 
4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rhblg" Dec 11 14:22:11 crc kubenswrapper[4690]: I1211 14:22:11.502049 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rhblg" Dec 11 14:22:11 crc kubenswrapper[4690]: I1211 14:22:11.556454 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rhblg" Dec 11 14:22:12 crc kubenswrapper[4690]: I1211 14:22:12.056312 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rhblg" Dec 11 14:22:13 crc kubenswrapper[4690]: I1211 14:22:13.954345 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kmbqb" Dec 11 14:22:13 crc kubenswrapper[4690]: I1211 14:22:13.990660 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kmbqb" Dec 11 14:24:29 crc kubenswrapper[4690]: I1211 14:24:29.358384 4690 scope.go:117] "RemoveContainer" containerID="071dcc6b5227134aa7edd407cd17ec42007d5b85e0e5a79d82c2999eb083c682" Dec 11 14:24:34 crc kubenswrapper[4690]: I1211 14:24:34.627866 4690 patch_prober.go:28] interesting pod/machine-config-daemon-z9662 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 14:24:34 crc kubenswrapper[4690]: I1211 14:24:34.628808 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z9662" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 14:25:04 crc kubenswrapper[4690]: I1211 14:25:04.627829 4690 patch_prober.go:28] interesting pod/machine-config-daemon-z9662 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 14:25:04 crc kubenswrapper[4690]: I1211 14:25:04.628737 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z9662" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 14:25:34 crc kubenswrapper[4690]: I1211 14:25:34.627076 4690 patch_prober.go:28] interesting pod/machine-config-daemon-z9662 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 14:25:34 crc kubenswrapper[4690]: I1211 14:25:34.627815 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z9662" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 14:25:34 crc kubenswrapper[4690]: I1211 
14:25:34.627868 4690 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-z9662" Dec 11 14:25:34 crc kubenswrapper[4690]: I1211 14:25:34.628493 4690 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0e9f2eaec22188530b2e4b668101f76740db88621e2cdc541923a12bc2ca9d08"} pod="openshift-machine-config-operator/machine-config-daemon-z9662" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 11 14:25:34 crc kubenswrapper[4690]: I1211 14:25:34.628556 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-z9662" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" containerName="machine-config-daemon" containerID="cri-o://0e9f2eaec22188530b2e4b668101f76740db88621e2cdc541923a12bc2ca9d08" gracePeriod=600 Dec 11 14:25:37 crc kubenswrapper[4690]: I1211 14:25:37.174116 4690 generic.go:334] "Generic (PLEG): container finished" podID="44a7b31b-09dd-452b-87ba-29764eaa0206" containerID="0e9f2eaec22188530b2e4b668101f76740db88621e2cdc541923a12bc2ca9d08" exitCode=0 Dec 11 14:25:37 crc kubenswrapper[4690]: I1211 14:25:37.174173 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z9662" event={"ID":"44a7b31b-09dd-452b-87ba-29764eaa0206","Type":"ContainerDied","Data":"0e9f2eaec22188530b2e4b668101f76740db88621e2cdc541923a12bc2ca9d08"} Dec 11 14:25:37 crc kubenswrapper[4690]: I1211 14:25:37.174913 4690 scope.go:117] "RemoveContainer" containerID="1c80cd2a90322545319a0730a69c8f3377cd248081b58abd3a0b769926d09d16" Dec 11 14:25:38 crc kubenswrapper[4690]: I1211 14:25:38.185112 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-z9662" event={"ID":"44a7b31b-09dd-452b-87ba-29764eaa0206","Type":"ContainerStarted","Data":"b828acf0f7f3245094558210194463e3cbbf52f4643d73a0484d8af783f8298a"} Dec 11 14:26:38 crc kubenswrapper[4690]: I1211 14:26:38.524198 4690 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 11 14:28:04 crc kubenswrapper[4690]: I1211 14:28:04.627641 4690 patch_prober.go:28] interesting pod/machine-config-daemon-z9662 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 14:28:04 crc kubenswrapper[4690]: I1211 14:28:04.628993 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-z9662" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 11 14:28:34 crc kubenswrapper[4690]: I1211 14:28:34.627379 4690 patch_prober.go:28] interesting pod/machine-config-daemon-z9662 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 11 14:28:34 crc kubenswrapper[4690]: I1211 14:28:34.627927 4690 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-z9662" podUID="44a7b31b-09dd-452b-87ba-29764eaa0206" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515116552456024457 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015116552456017374 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015116550376016516 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015116550376015466 5ustar corecore